From dd6cf19c3909a9c81168128b389492f5228202a3 Mon Sep 17 00:00:00 2001 From: Elijah DeLee Date: Mon, 14 Feb 2022 11:01:30 -0500 Subject: [PATCH 001/125] update steps for using custom operator Updating this to use the new make commands in the operator repo --- docs/development/minikube.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/development/minikube.md b/docs/development/minikube.md index eef4e31c93..f18b72109d 100644 --- a/docs/development/minikube.md +++ b/docs/development/minikube.md @@ -52,12 +52,12 @@ of the awx-operator repo. If not, continue to the next section. ### Building and Deploying a Custom AWX Operator Image ``` -$ operator-sdk build quay.io//awx-operator -$ docker push quay.io//awx-operator -$ ansible-playbook ansible/deploy-operator.yml \ - -e pull_policy=Always \ - -e operator_image=quay.io//awx-operator \ - -e operator_version=latest +# in awx-operator repo on the branch you want to use +$ export IMAGE_TAG_BASE=quay.io//awx-operator +$ export VERSION= +$ make docker-build +$ docker push ${IMAGE_TAG_BASE}:${VERSION} +$ make deploy ``` ## Deploy AWX into Minikube using the AWX Operator From 301818003da7d7f32d4052de70322f81b81bc37d Mon Sep 17 00:00:00 2001 From: dluong Date: Tue, 15 Feb 2022 16:03:20 -0500 Subject: [PATCH 002/125] Fixed doc string for Container Groups credential type --- awx_collection/plugins/modules/instance_group.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx_collection/plugins/modules/instance_group.py b/awx_collection/plugins/modules/instance_group.py index 5daba3a975..75eb48a9dd 100644 --- a/awx_collection/plugins/modules/instance_group.py +++ b/awx_collection/plugins/modules/instance_group.py @@ -33,7 +33,7 @@ options: type: str credential: description: - - Credential to authenticate with Kubernetes or OpenShift. Must be of type "Kubernetes/OpenShift API Bearer Token". + - Credential to authenticate with Kubernetes or OpenShift. 
Must be of type "OpenShift or Kubernetes API Bearer Token". required: False type: str is_container_group: From 1246b14e7eb841494d6d8fe046b40bd8dba285ac Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 4 Jan 2022 07:34:03 -0800 Subject: [PATCH 003/125] WIP add network visualizer to Controller UI. --- awx/ui/src/api/index.js | 3 + awx/ui/src/api/models/Mesh.js | 9 + awx/ui/src/routeConfig.js | 6 + awx/ui/src/screens/TopologyView/MeshGraph.js | 252 ++++++++++++++++++ .../src/screens/TopologyView/TopologyView.js | 45 ++++ awx/ui/src/screens/TopologyView/index.js | 1 + 6 files changed, 316 insertions(+) create mode 100644 awx/ui/src/api/models/Mesh.js create mode 100644 awx/ui/src/screens/TopologyView/MeshGraph.js create mode 100644 awx/ui/src/screens/TopologyView/TopologyView.js create mode 100644 awx/ui/src/screens/TopologyView/index.js diff --git a/awx/ui/src/api/index.js b/awx/ui/src/api/index.js index a098f28781..5281ad861d 100644 --- a/awx/ui/src/api/index.js +++ b/awx/ui/src/api/index.js @@ -21,6 +21,7 @@ import Jobs from './models/Jobs'; import JobEvents from './models/JobEvents'; import Labels from './models/Labels'; import Me from './models/Me'; +import Mesh from './models/Mesh'; import Metrics from './models/Metrics'; import NotificationTemplates from './models/NotificationTemplates'; import Notifications from './models/Notifications'; @@ -67,6 +68,7 @@ const JobsAPI = new Jobs(); const JobEventsAPI = new JobEvents(); const LabelsAPI = new Labels(); const MeAPI = new Me(); +const MeshAPI = new Mesh(); const MetricsAPI = new Metrics(); const NotificationTemplatesAPI = new NotificationTemplates(); const NotificationsAPI = new Notifications(); @@ -114,6 +116,7 @@ export { JobEventsAPI, LabelsAPI, MeAPI, + MeshAPI, MetricsAPI, NotificationTemplatesAPI, NotificationsAPI, diff --git a/awx/ui/src/api/models/Mesh.js b/awx/ui/src/api/models/Mesh.js new file mode 100644 index 0000000000..d7ad08067c --- /dev/null +++ b/awx/ui/src/api/models/Mesh.js @@ -0,0 +1,9 @@ 
+import Base from '../Base'; + +class Mesh extends Base { + constructor(http) { + super(http); + this.baseUrl = '/api/v2/mesh_visualizer/'; + } +} +export default Mesh; diff --git a/awx/ui/src/routeConfig.js b/awx/ui/src/routeConfig.js index 339e52a228..f945e9ea79 100644 --- a/awx/ui/src/routeConfig.js +++ b/awx/ui/src/routeConfig.js @@ -19,6 +19,7 @@ import Schedules from 'screens/Schedule'; import Settings from 'screens/Setting'; import Teams from 'screens/Team'; import Templates from 'screens/Template'; +import TopologyView from 'screens/TopologyView'; import Users from 'screens/User'; import WorkflowApprovals from 'screens/WorkflowApproval'; import { Jobs } from 'screens/Job'; @@ -147,6 +148,11 @@ function getRouteConfig(userProfile = {}) { path: '/execution_environments', screen: ExecutionEnvironments, }, + { + title: Topology View, + path: '/topology_view', + screen: TopologyView, + }, ], }, { diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js new file mode 100644 index 0000000000..c1ac487532 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -0,0 +1,252 @@ +import React, { useEffect, useCallback } from 'react'; +import { t } from '@lingui/macro'; +import * as d3 from 'd3'; + +function MeshGraph({ data }) { + console.log('data', data); + const draw = useCallback(() => { + const margin = 80; + const getWidth = () => { + let width; + // This is in an a try/catch due to an error from jest. 
+ // Even though the d3.select returns a valid selector with + // style function, it says it is null in the test + try { + width = + parseInt(d3.select(`#chart`).style('width'), 10) - margin || 700; + } catch (error) { + width = 700; + } + + return width; + }; + const width = getWidth(); + const height = 600; + + /* Add SVG */ + d3.selectAll(`#chart > *`).remove(); + + const svg = d3 + .select('#chart') + .append('svg') + .attr('width', `${width + margin}px`) + .attr('height', `${height + margin}px`) + .append('g') + .attr('transform', `translate(${margin}, ${margin})`); + + const color = d3.scaleOrdinal(d3.schemeCategory10); + + const simulation = d3 + .forceSimulation() + .force( + 'link', + d3.forceLink().id(function (d) { + return d.hostname; + }) + ) + .force('charge', d3.forceManyBody().strength(-350)) + .force( + 'collide', + d3.forceCollide(function (d) { + return d.node_type === 'execution' || d.node_type === 'hop' + ? 75 + : 100; + }) + ) + .force('center', d3.forceCenter(width / 2, height / 2)); + + const graph = data; + + const link = svg + .append('g') + .attr('class', 'links') + .selectAll('path') + .data(graph.links) + .enter() + .append('path') + .style('fill', 'none') + .style('stroke', '#ccc') + .style('stroke-width', '2px') + .attr('pointer-events', 'visibleStroke') + .on('mouseover', function (event, d) { + tooltip + .html(`source: ${d.source.hostname}
target: ${d.target.hostname}`) + .style('visibility', 'visible'); + d3.select(this).transition().style('cursor', 'pointer'); + }) + .on('mousemove', function () { + tooltip + .style('top', event.pageY - 10 + 'px') + .style('left', event.pageX + 10 + 'px'); + }) + .on('mouseout', function () { + tooltip.html(``).style('visibility', 'hidden'); + }); + + const node = svg + .append('g') + .attr('class', 'nodes') + .selectAll('g') + .data(graph.nodes) + .enter() + .append('g') + .on('mouseover', function (event, d) { + tooltip + .html( + `name: ${d.hostname}
type: ${d.node_type}
status: ${d.node_state}` + ) + .style('visibility', 'visible'); + // d3.select(this).transition().attr('r', 9).style('cursor', 'pointer'); + }) + .on('mousemove', function () { + tooltip + .style('top', event.pageY - 10 + 'px') + .style('left', event.pageX + 10 + 'px'); + }) + .on('mouseout', function () { + tooltip.html(``).style('visibility', 'hidden'); + // d3.select(this).attr('r', 6); + }); + + const healthRings = node + .append('circle') + .attr('r', 8) + .attr('class', (d) => d.node_state) + .attr('stroke', d => d.node_state === 'disabled' ? '#c6c6c6' : '#50D050') + .attr('fill', d => d.node_state === 'disabled' ? '#c6c6c6' : '#50D050'); + + const nodeRings = node + .append('circle') + .attr('r', 6) + .attr('class', (d) => d.node_type) + .attr('fill', function (d) { + return color(d.node_type); + }); + svg.call(expandGlow); + + const legend = svg + .append('g') + .attr('class', 'chart-legend') + .selectAll('g') + .data(graph.nodes) + .enter() + .append('circle') + .attr('cx', 10) + .attr('cy', function (d, i) { + return 100 + i * 25; + }) + .attr('r', 7) + .attr('class', (d) => d.node_type) + .style('fill', function (d) { + return color(d.node_type); + }); + + const legend_text = svg + .append('g') + .attr('class', 'chart-text') + .selectAll('g') + .data(graph.nodes) + .enter() + .append('text') + .attr('x', 20) + .attr('y', function (d, i) { + return 100 + i * 25; + }) + .text((d) => `${d.hostname} - ${d.node_type}`) + .attr('text-anchor', 'left') + .style('alignment-baseline', 'middle'); + + const tooltip = d3 + .select('#chart') + .append('div') + .attr('class', 'd3-tooltip') + .style('position', 'absolute') + .style('z-index', '10') + .style('visibility', 'hidden') + .style('padding', '15px') + .style('background', 'rgba(0,0,0,0.6)') + .style('border-radius', '5px') + .style('color', '#fff') + .style('font-family', 'sans-serif') + .text('a simple tooltip'); + + const labels = node + .append('text') + .text(function (d) { + return d.hostname; + }) + 
.attr('x', 16) + .attr('y', 3); + + simulation.nodes(graph.nodes).on('tick', ticked); + simulation.force('link').links(graph.links); + + function ticked() { + link.attr('d', linkArc); + node.attr('transform', function (d) { + return 'translate(' + d.x + ',' + d.y + ')'; + }); + } + + function linkArc(d) { + var dx = d.target.x - d.source.x, + dy = d.target.y - d.source.y, + dr = Math.sqrt(dx * dx + dy * dy); + return ( + 'M' + + d.source.x + + ',' + + d.source.y + + 'A' + + dr + + ',' + + dr + + ' 0 0,1 ' + + d.target.x + + ',' + + d.target.y + ); + } + + function contractGlow() { + healthRings + .transition() + .duration(1000) + .attr('stroke-width', '1px') + .on('end', expandGlow); + } + + function expandGlow() { + healthRings + .transition() + .duration(1000) + .attr('stroke-width', '4.5px') + .on('end', contractGlow); + } + + const zoom = d3 + .zoom() + .scaleExtent([1, 8]) + .on('zoom', function (event) { + svg.selectAll('.links, .nodes').attr('transform', event.transform); + }); + + svg.call(zoom); + }, [data]); + + useEffect(() => { + function handleResize() { + draw(); + } + + window.addEventListener('resize', handleResize); + + handleResize(); + + return () => window.removeEventListener('resize', handleResize); + }, [draw]); + + return
; +} + +export default MeshGraph; diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js new file mode 100644 index 0000000000..be43df6ad8 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -0,0 +1,45 @@ +import React, { useEffect, useCallback } from 'react'; + +import { t } from '@lingui/macro'; +import ScreenHeader from 'components/ScreenHeader/ScreenHeader'; +import { + PageSection, + Card, + CardHeader, + CardBody, +} from '@patternfly/react-core'; +import MeshGraph from './MeshGraph'; +import useRequest from 'hooks/useRequest'; +import { MeshAPI } from 'api'; + +function TopologyView() { + const { + result: { meshData }, + error: fetchInitialError, + request: fetchMeshVisualizer, + } = useRequest( + useCallback(async () => { + const { data } = await MeshAPI.read(); + return { + meshData: data, + }; + }, []), + { meshData: { nodes: [], links: [] } } + ); + useEffect(() => { + fetchMeshVisualizer(); + }, [fetchMeshVisualizer]); + return ( + <> + + + + + {meshData && } + + + + ); +} + +export default TopologyView; diff --git a/awx/ui/src/screens/TopologyView/index.js b/awx/ui/src/screens/TopologyView/index.js new file mode 100644 index 0000000000..b0983be986 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/index.js @@ -0,0 +1 @@ +export { default } from './TopologyView'; From 826a069be01524ef253c48db8c53fa3666a7acd0 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 18 Jan 2022 09:41:16 -0800 Subject: [PATCH 004/125] Highlight immediate siblings on hover. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 270 ++++++++++++++++--- 1 file changed, 226 insertions(+), 44 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index c1ac487532..52747d0a81 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -2,8 +2,102 @@ import React, { useEffect, useCallback } from 'react'; import { t } from '@lingui/macro'; import * as d3 from 'd3'; -function MeshGraph({ data }) { - console.log('data', data); +// function MeshGraph({ data }) { +function MeshGraph() { + const data = { + nodes: [ + { + hostname: "aapc1.local", + node_state: "healthy", + node_type: "control", + id: 1 + }, + { + hostname: "aapc2.local", + node_type: "control", + node_state: "disabled", + id: 2 + }, + { + hostname: "aapc3.local", + node_type: "control", + node_state: "healthy", + id: 3 + }, + { + hostname: "aape1.local", + node_type: "execution", + node_state: "error", + id: 4 + }, + { + hostname: "aape2.local", + node_type: "execution", + node_state: "error", + id: 5 + }, + { + hostname: "aape3.local", + node_type: "execution", + node_state: "healthy", + id: 6 + }, + { + hostname: "aape4.local", + node_type: "execution", + node_state: "healthy", + id: 7 + }, + { + hostname: "aaph1.local", + node_type: "hop", + node_state: "disabled", + id: 8 + }, + { + hostname: "aaph2.local", + node_type: "hop", + node_state: "healthy", + id: 9 + }, + { + hostname: "aaph3.local", + node_type: "hop", + node_state: "error", + id: 10 + } + ], + links: [ + { source: "aapc1.local", target: "aapc2.local" }, + { source: "aapc1.local", target: "aapc3.local" }, + { source: "aapc1.local", target: "aape1.local" }, + { source: "aapc1.local", target: "aape2.local" }, + + { source: "aapc2.local", target: "aapc3.local" }, + { source: "aapc2.local", target: "aape1.local" }, + { source: "aapc2.local", target: "aape2.local" }, + + { source: "aapc3.local", target: 
"aape1.local" }, + { source: "aapc3.local", target: "aape2.local" }, + + { source: "aape3.local", target: "aaph1.local" }, + { source: "aape3.local", target: "aaph2.local" }, + + { source: "aape4.local", target: "aaph3.local" }, + + { source: "aaph1.local", target: "aapc1.local" }, + { source: "aaph1.local", target: "aapc2.local" }, + { source: "aaph1.local", target: "aapc3.local" }, + + { source: "aaph2.local", target: "aapc1.local" }, + { source: "aaph2.local", target: "aapc2.local" }, + { source: "aaph2.local", target: "aapc3.local" }, + + { source: "aaph3.local", target: "aaph1.local" }, + { source: "aaph3.local", target: "aaph2.local" } + ] + }; + const draw = useCallback(() => { const margin = 80; const getWidth = () => { @@ -22,6 +116,15 @@ function MeshGraph({ data }) { }; const width = getWidth(); const height = 600; + const defaultRadius = 6; + const highlightRadius = 9; + + const zoom = d3 + .zoom() + .scaleExtent([1, 8]) + .on('zoom', function (event) { + svg.selectAll('.links, .nodes').attr('transform', event.transform); + }); /* Add SVG */ d3.selectAll(`#chart > *`).remove(); @@ -32,9 +135,11 @@ function MeshGraph({ data }) { .attr('width', `${width + margin}px`) .attr('height', `${height + margin}px`) .append('g') - .attr('transform', `translate(${margin}, ${margin})`); + .attr('transform', `translate(${margin}, ${margin})`) + .call(zoom); const color = d3.scaleOrdinal(d3.schemeCategory10); + const graph = data; const simulation = d3 .forceSimulation() @@ -55,32 +160,31 @@ function MeshGraph({ data }) { ) .force('center', d3.forceCenter(width / 2, height / 2)); - const graph = data; - const link = svg .append('g') - .attr('class', 'links') + .attr('class', `links`) .selectAll('path') .data(graph.links) .enter() .append('path') + .attr('class', (d, i) => `link-${i}`) .style('fill', 'none') .style('stroke', '#ccc') .style('stroke-width', '2px') - .attr('pointer-events', 'visibleStroke') + .attr('pointer-events', 'none') .on('mouseover', function 
(event, d) { - tooltip - .html(`source: ${d.source.hostname}
target: ${d.target.hostname}`) - .style('visibility', 'visible'); + // tooltip + // .html(`source: ${d.source.hostname}
target: ${d.target.hostname}`) + // .style('visibility', 'visible'); d3.select(this).transition().style('cursor', 'pointer'); }) .on('mousemove', function () { - tooltip - .style('top', event.pageY - 10 + 'px') - .style('left', event.pageX + 10 + 'px'); + // tooltip + // .style('top', event.pageY - 10 + 'px') + // .style('left', event.pageX + 10 + 'px'); }) .on('mouseout', function () { - tooltip.html(``).style('visibility', 'hidden'); + // tooltip.html(``).style('visibility', 'hidden'); }); const node = svg @@ -90,52 +194,64 @@ function MeshGraph({ data }) { .data(graph.nodes) .enter() .append('g') - .on('mouseover', function (event, d) { + .on('mouseenter', function (event, d) { + d3.select(this).transition().style('cursor', 'pointer'); + highlightSiblings(d) tooltip .html( - `name: ${d.hostname}
type: ${d.node_type}
status: ${d.node_state}` + `

Details


name: ${d.hostname}
type: ${d.node_type}
status: ${d.node_state}
Click on a node to view the details` ) - .style('visibility', 'visible'); + .style('visibility', 'visible') + // .style('visibility', 'visible'); // d3.select(this).transition().attr('r', 9).style('cursor', 'pointer'); }) .on('mousemove', function () { - tooltip - .style('top', event.pageY - 10 + 'px') - .style('left', event.pageX + 10 + 'px'); + // tooltip + // .style('top', event.pageY - 10 + 'px') + // .style('left', event.pageX + 10 + 'px'); }) - .on('mouseout', function () { + .on('mouseleave', function (event, d) { + deselectSiblings(d) tooltip.html(``).style('visibility', 'hidden'); // d3.select(this).attr('r', 6); }); - + const healthRings = node .append('circle') .attr('r', 8) .attr('class', (d) => d.node_state) - .attr('stroke', d => d.node_state === 'disabled' ? '#c6c6c6' : '#50D050') - .attr('fill', d => d.node_state === 'disabled' ? '#c6c6c6' : '#50D050'); - + .attr('stroke', d => renderHealthColor(d.node_state)) + .attr('fill', d => renderHealthColor(d.node_state)); + const nodeRings = node .append('circle') - .attr('r', 6) + .attr('r', defaultRadius) .attr('class', (d) => d.node_type) + .attr('class', (d) => `id-${d.id}`) .attr('fill', function (d) { return color(d.node_type); - }); + }) + .attr('stroke', 'white'); svg.call(expandGlow); const legend = svg + .append('text') + .attr('x', 10) + .attr('y', 20) + .text('Legend') + + svg .append('g') - .attr('class', 'chart-legend') .selectAll('g') + .attr('class', 'chart-legend') .data(graph.nodes) .enter() .append('circle') .attr('cx', 10) .attr('cy', function (d, i) { - return 100 + i * 25; + return 50 + i * 25; }) - .attr('r', 7) + .attr('r', defaultRadius) .attr('class', (d) => d.node_type) .style('fill', function (d) { return color(d.node_type); @@ -150,7 +266,7 @@ function MeshGraph({ data }) { .append('text') .attr('x', 20) .attr('y', function (d, i) { - return 100 + i * 25; + return 50 + i * 25; }) .text((d) => `${d.hostname} - ${d.node_type}`) .attr('text-anchor', 'left') @@ -161,14 +277,20 @@ 
function MeshGraph({ data }) { .append('div') .attr('class', 'd3-tooltip') .style('position', 'absolute') + .style('top', '200px') + .style('right', '40px') .style('z-index', '10') .style('visibility', 'hidden') .style('padding', '15px') - .style('background', 'rgba(0,0,0,0.6)') - .style('border-radius', '5px') - .style('color', '#fff') + // .style('border', '1px solid #e6e6e6') + // .style('box-shadow', '5px 5px 5px #e6e6e6') + .style('max-width', '15%') + // .style('background', 'rgba(0,0,0,0.6)') + // .style('border-radius', '5px') + // .style('color', '#fff') .style('font-family', 'sans-serif') - .text('a simple tooltip'); + .style('color', '#e6e6e') + .text(''); const labels = node .append('text') @@ -183,6 +305,7 @@ function MeshGraph({ data }) { function ticked() { link.attr('d', linkArc); + node.attr('transform', function (d) { return 'translate(' + d.x + ',' + d.y + ')'; }); @@ -209,7 +332,8 @@ function MeshGraph({ data }) { } function contractGlow() { - healthRings + svg + .selectAll('.healthy') .transition() .duration(1000) .attr('stroke-width', '1px') @@ -217,21 +341,79 @@ function MeshGraph({ data }) { } function expandGlow() { - healthRings + svg + .selectAll('.healthy') .transition() .duration(1000) .attr('stroke-width', '4.5px') .on('end', contractGlow); } - const zoom = d3 - .zoom() - .scaleExtent([1, 8]) - .on('zoom', function (event) { - svg.selectAll('.links, .nodes').attr('transform', event.transform); - }); + function renderHealthColor(nodeState) { + const colorKey = { + 'disabled': '#c6c6c6', + 'healthy': '#50D050', + 'error': '#ff6766' + }; + return colorKey[nodeState]; + } - svg.call(zoom); + function renderNodeClass(nodeState) { + const colorKey = { + 'disabled': 'node-disabled', + 'healthy': 'node-healthy', + 'error': 'node-error' + }; + return colorKey[nodeState]; + } + + function highlightSiblings(node) { + setTimeout(function() { + svg.selectAll(`id-${node.id}`) + .attr('r', highlightRadius); + const immediate = graph.links.filter(link 
=> node.hostname === link.source.hostname || node.hostname === link.target.hostname); + immediate.forEach(s => { + const links = svg.selectAll(`.link-${s.index}`) + .transition() + .style('stroke', '#6e6e6e') + const sourceNodes = svg.selectAll(`.id-${s.source.id}`) + .transition() + .attr('r', highlightRadius) + const targetNodes = svg.selectAll(`.id-${s.target.id}`) + .transition() + .attr('r', highlightRadius) + }) + + }, 0) + + } + + function deselectSiblings(node) { + svg.selectAll(`id-${node.id}`) + .attr('r', defaultRadius); + const immediate = graph.links.filter(link => node.hostname === link.source.hostname || node.hostname === link.target.hostname); + immediate.forEach(s => { + const links = svg.selectAll(`.link-${s.index}`) + .transition() + .style('stroke', '#ccc') + svg.selectAll(`.id-${s.source.id}`) + .transition() + .attr('r', defaultRadius) + svg.selectAll(`.id-${s.target.id}`) + .transition() + .attr('r', defaultRadius) + }) + } + // const zoom = d3 + // .zoom() + // .scaleExtent([1, 8]) + // .on('zoom', function (event) { + // svg.selectAll('.links, .nodes').attr('transform', event.transform); + // }); + + // svg.call(zoom); + // node.call(zoom); + // link.call(zoom); }, [data]); useEffect(() => { From 73a5802c1108d25206f0009c8168ba8835cd317e Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 19 Jan 2022 14:43:33 -0800 Subject: [PATCH 005/125] Lint. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 375 ++++++++---------- .../src/screens/TopologyView/TopologyView.js | 11 +- 2 files changed, 165 insertions(+), 221 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 52747d0a81..77ca7656f4 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -1,102 +1,102 @@ import React, { useEffect, useCallback } from 'react'; -import { t } from '@lingui/macro'; +// import { t } from '@lingui/macro'; import * as d3 from 'd3'; -// function MeshGraph({ data }) { -function MeshGraph() { - const data = { - nodes: [ - { - hostname: "aapc1.local", - node_state: "healthy", - node_type: "control", - id: 1 - }, - { - hostname: "aapc2.local", - node_type: "control", - node_state: "disabled", - id: 2 - }, - { - hostname: "aapc3.local", - node_type: "control", - node_state: "healthy", - id: 3 - }, - { - hostname: "aape1.local", - node_type: "execution", - node_state: "error", - id: 4 - }, - { - hostname: "aape2.local", - node_type: "execution", - node_state: "error", - id: 5 - }, - { - hostname: "aape3.local", - node_type: "execution", - node_state: "healthy", - id: 6 - }, - { - hostname: "aape4.local", - node_type: "execution", - node_state: "healthy", - id: 7 - }, - { - hostname: "aaph1.local", - node_type: "hop", - node_state: "disabled", - id: 8 - }, - { - hostname: "aaph2.local", - node_type: "hop", - node_state: "healthy", - id: 9 - }, - { - hostname: "aaph3.local", - node_type: "hop", - node_state: "error", - id: 10 - } - ], - links: [ - { source: "aapc1.local", target: "aapc2.local" }, - { source: "aapc1.local", target: "aapc3.local" }, - { source: "aapc1.local", target: "aape1.local" }, - { source: "aapc1.local", target: "aape2.local" }, +function MeshGraph({ data }) { + // function MeshGraph() { + // const data = { + // nodes: [ + // { + // hostname: 'aapc1.local', + // node_state: 'healthy', + // 
node_type: 'control', + // id: 1, + // }, + // { + // hostname: 'aapc2.local', + // node_type: 'control', + // node_state: 'disabled', + // id: 2, + // }, + // { + // hostname: 'aapc3.local', + // node_type: 'control', + // node_state: 'healthy', + // id: 3, + // }, + // { + // hostname: 'aape1.local', + // node_type: 'execution', + // node_state: 'error', + // id: 4, + // }, + // { + // hostname: 'aape2.local', + // node_type: 'execution', + // node_state: 'error', + // id: 5, + // }, + // { + // hostname: 'aape3.local', + // node_type: 'execution', + // node_state: 'healthy', + // id: 6, + // }, + // { + // hostname: 'aape4.local', + // node_type: 'execution', + // node_state: 'healthy', + // id: 7, + // }, + // { + // hostname: 'aaph1.local', + // node_type: 'hop', + // node_state: 'disabled', + // id: 8, + // }, + // { + // hostname: 'aaph2.local', + // node_type: 'hop', + // node_state: 'healthy', + // id: 9, + // }, + // { + // hostname: 'aaph3.local', + // node_type: 'hop', + // node_state: 'error', + // id: 10, + // }, + // ], + // links: [ + // { source: 'aapc1.local', target: 'aapc2.local' }, + // { source: 'aapc1.local', target: 'aapc3.local' }, + // { source: 'aapc1.local', target: 'aape1.local' }, + // { source: 'aapc1.local', target: 'aape2.local' }, - { source: "aapc2.local", target: "aapc3.local" }, - { source: "aapc2.local", target: "aape1.local" }, - { source: "aapc2.local", target: "aape2.local" }, + // { source: 'aapc2.local', target: 'aapc3.local' }, + // { source: 'aapc2.local', target: 'aape1.local' }, + // { source: 'aapc2.local', target: 'aape2.local' }, - { source: "aapc3.local", target: "aape1.local" }, - { source: "aapc3.local", target: "aape2.local" }, + // { source: 'aapc3.local', target: 'aape1.local' }, + // { source: 'aapc3.local', target: 'aape2.local' }, - { source: "aape3.local", target: "aaph1.local" }, - { source: "aape3.local", target: "aaph2.local" }, + // { source: 'aape3.local', target: 'aaph1.local' }, + // { source: 
'aape3.local', target: 'aaph2.local' }, - { source: "aape4.local", target: "aaph3.local" }, + // { source: 'aape4.local', target: 'aaph3.local' }, - { source: "aaph1.local", target: "aapc1.local" }, - { source: "aaph1.local", target: "aapc2.local" }, - { source: "aaph1.local", target: "aapc3.local" }, + // { source: 'aaph1.local', target: 'aapc1.local' }, + // { source: 'aaph1.local', target: 'aapc2.local' }, + // { source: 'aaph1.local', target: 'aapc3.local' }, - { source: "aaph2.local", target: "aapc1.local" }, - { source: "aaph2.local", target: "aapc2.local" }, - { source: "aaph2.local", target: "aapc3.local" }, + // { source: 'aaph2.local', target: 'aapc1.local' }, + // { source: 'aaph2.local', target: 'aapc2.local' }, + // { source: 'aaph2.local', target: 'aapc3.local' }, - { source: "aaph3.local", target: "aaph1.local" }, - { source: "aaph3.local", target: "aaph2.local" } - ] - }; + // { source: 'aaph3.local', target: 'aaph1.local' }, + // { source: 'aaph3.local', target: 'aaph2.local' }, + // ], + // }; const draw = useCallback(() => { const margin = 80; @@ -122,7 +122,7 @@ function MeshGraph() { const zoom = d3 .zoom() .scaleExtent([1, 8]) - .on('zoom', function (event) { + .on('zoom', (event) => { svg.selectAll('.links, .nodes').attr('transform', event.transform); }); @@ -145,18 +145,14 @@ function MeshGraph() { .forceSimulation() .force( 'link', - d3.forceLink().id(function (d) { - return d.hostname; - }) + d3.forceLink().id((d) => d.hostname) ) .force('charge', d3.forceManyBody().strength(-350)) .force( 'collide', - d3.forceCollide(function (d) { - return d.node_type === 'execution' || d.node_type === 'hop' - ? 75 - : 100; - }) + d3.forceCollide((d) => + d.node_type === 'execution' || d.node_type === 'hop' ? 
75 : 100 + ) ) .force('center', d3.forceCenter(width / 2, height / 2)); @@ -172,19 +168,8 @@ function MeshGraph() { .style('stroke', '#ccc') .style('stroke-width', '2px') .attr('pointer-events', 'none') - .on('mouseover', function (event, d) { - // tooltip - // .html(`source: ${d.source.hostname}
target: ${d.target.hostname}`) - // .style('visibility', 'visible'); + .on('mouseover', function showPointer() { d3.select(this).transition().style('cursor', 'pointer'); - }) - .on('mousemove', function () { - // tooltip - // .style('top', event.pageY - 10 + 'px') - // .style('left', event.pageX + 10 + 'px'); - }) - .on('mouseout', function () { - // tooltip.html(``).style('visibility', 'hidden'); }); const node = svg @@ -194,53 +179,42 @@ function MeshGraph() { .data(graph.nodes) .enter() .append('g') - .on('mouseenter', function (event, d) { + .on('mouseenter', function handleNodeHover(_, d) { d3.select(this).transition().style('cursor', 'pointer'); - highlightSiblings(d) + highlightSiblings(d); tooltip .html( `

Details


name: ${d.hostname}
type: ${d.node_type}
status: ${d.node_state}
Click on a node to view the details` ) - .style('visibility', 'visible') - // .style('visibility', 'visible'); - // d3.select(this).transition().attr('r', 9).style('cursor', 'pointer'); + .style('visibility', 'visible'); }) - .on('mousemove', function () { - // tooltip - // .style('top', event.pageY - 10 + 'px') - // .style('left', event.pageX + 10 + 'px'); - }) - .on('mouseleave', function (event, d) { - deselectSiblings(d) + .on('mouseleave', (_, d) => { + deselectSiblings(d); tooltip.html(``).style('visibility', 'hidden'); - // d3.select(this).attr('r', 6); }); - const healthRings = node + // health rings on nodes + node .append('circle') .attr('r', 8) .attr('class', (d) => d.node_state) - .attr('stroke', d => renderHealthColor(d.node_state)) - .attr('fill', d => renderHealthColor(d.node_state)); + .attr('stroke', (d) => renderHealthColor(d.node_state)) + .attr('fill', (d) => renderHealthColor(d.node_state)); - const nodeRings = node + // inner node ring + node .append('circle') .attr('r', defaultRadius) .attr('class', (d) => d.node_type) .attr('class', (d) => `id-${d.id}`) - .attr('fill', function (d) { - return color(d.node_type); - }) + .attr('fill', (d) => color(d.node_type)) .attr('stroke', 'white'); svg.call(expandGlow); - const legend = svg - .append('text') - .attr('x', 10) - .attr('y', 20) - .text('Legend') + // legend + svg.append('text').attr('x', 10).attr('y', 20).text('Legend'); - svg + svg .append('g') .selectAll('g') .attr('class', 'chart-legend') @@ -248,16 +222,13 @@ function MeshGraph() { .enter() .append('circle') .attr('cx', 10) - .attr('cy', function (d, i) { - return 50 + i * 25; - }) + .attr('cy', (d, i) => 50 + i * 25) .attr('r', defaultRadius) .attr('class', (d) => d.node_type) - .style('fill', function (d) { - return color(d.node_type); - }); + .style('fill', (d) => color(d.node_type)); - const legend_text = svg + // legend text + svg .append('g') .attr('class', 'chart-text') .selectAll('g') @@ -265,9 +236,7 @@ function MeshGraph() { 
.enter() .append('text') .attr('x', 20) - .attr('y', function (d, i) { - return 50 + i * 25; - }) + .attr('y', (d, i) => 50 + i * 25) .text((d) => `${d.hostname} - ${d.node_type}`) .attr('text-anchor', 'left') .style('alignment-baseline', 'middle'); @@ -292,11 +261,10 @@ function MeshGraph() { .style('color', '#e6e6e') .text(''); - const labels = node + // node labels + node .append('text') - .text(function (d) { - return d.hostname; - }) + .text((d) => d.hostname) .attr('x', 16) .attr('y', 3); @@ -306,29 +274,14 @@ function MeshGraph() { function ticked() { link.attr('d', linkArc); - node.attr('transform', function (d) { - return 'translate(' + d.x + ',' + d.y + ')'; - }); + node.attr('transform', (d) => `translate(${d.x},${d.y})`); } function linkArc(d) { - var dx = d.target.x - d.source.x, - dy = d.target.y - d.source.y, - dr = Math.sqrt(dx * dx + dy * dy); - return ( - 'M' + - d.source.x + - ',' + - d.source.y + - 'A' + - dr + - ',' + - dr + - ' 0 0,1 ' + - d.target.x + - ',' + - d.target.y - ); + const dx = d.target.x - d.source.x; + const dy = d.target.y - d.source.y; + const dr = Math.sqrt(dx * dx + dy * dy); + return `M${d.source.x},${d.source.y}A${dr},${dr} 0 0,1 ${d.target.x},${d.target.y}`; } function contractGlow() { @@ -351,58 +304,54 @@ function MeshGraph() { function renderHealthColor(nodeState) { const colorKey = { - 'disabled': '#c6c6c6', - 'healthy': '#50D050', - 'error': '#ff6766' + disabled: '#c6c6c6', + healthy: '#50D050', + error: '#ff6766', }; return colorKey[nodeState]; } - function renderNodeClass(nodeState) { - const colorKey = { - 'disabled': 'node-disabled', - 'healthy': 'node-healthy', - 'error': 'node-error' - }; - return colorKey[nodeState]; + function highlightSiblings(n) { + setTimeout(() => { + svg.selectAll(`id-${n.id}`).attr('r', highlightRadius); + const immediate = graph.links.filter( + (l) => + n.hostname === l.source.hostname || n.hostname === l.target.hostname + ); + immediate.forEach((s) => { + svg + 
.selectAll(`.link-${s.index}`) + .transition() + .style('stroke', '#6e6e6e'); + svg + .selectAll(`.id-${s.source.id}`) + .transition() + .attr('r', highlightRadius); + svg + .selectAll(`.id-${s.target.id}`) + .transition() + .attr('r', highlightRadius); + }); + }, 0); } - function highlightSiblings(node) { - setTimeout(function() { - svg.selectAll(`id-${node.id}`) - .attr('r', highlightRadius); - const immediate = graph.links.filter(link => node.hostname === link.source.hostname || node.hostname === link.target.hostname); - immediate.forEach(s => { - const links = svg.selectAll(`.link-${s.index}`) + function deselectSiblings(n) { + svg.selectAll(`id-${n.id}`).attr('r', defaultRadius); + const immediate = graph.links.filter( + (l) => + n.hostname === l.source.hostname || n.hostname === l.target.hostname + ); + immediate.forEach((s) => { + svg.selectAll(`.link-${s.index}`).transition().style('stroke', '#ccc'); + svg + .selectAll(`.id-${s.source.id}`) .transition() - .style('stroke', '#6e6e6e') - const sourceNodes = svg.selectAll(`.id-${s.source.id}`) - .transition() - .attr('r', highlightRadius) - const targetNodes = svg.selectAll(`.id-${s.target.id}`) - .transition() - .attr('r', highlightRadius) - }) - - }, 0) - - } - - function deselectSiblings(node) { - svg.selectAll(`id-${node.id}`) .attr('r', defaultRadius); - const immediate = graph.links.filter(link => node.hostname === link.source.hostname || node.hostname === link.target.hostname); - immediate.forEach(s => { - const links = svg.selectAll(`.link-${s.index}`) + svg + .selectAll(`.id-${s.target.id}`) .transition() - .style('stroke', '#ccc') - svg.selectAll(`.id-${s.source.id}`) - .transition() - .attr('r', defaultRadius) - svg.selectAll(`.id-${s.target.id}`) - .transition() - .attr('r', defaultRadius) - }) + .attr('r', defaultRadius); + }); } // const zoom = d3 // .zoom() diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index be43df6ad8..2e62a01e0d 
100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -2,20 +2,15 @@ import React, { useEffect, useCallback } from 'react'; import { t } from '@lingui/macro'; import ScreenHeader from 'components/ScreenHeader/ScreenHeader'; -import { - PageSection, - Card, - CardHeader, - CardBody, -} from '@patternfly/react-core'; -import MeshGraph from './MeshGraph'; +import { PageSection, Card, CardBody } from '@patternfly/react-core'; import useRequest from 'hooks/useRequest'; import { MeshAPI } from 'api'; +import MeshGraph from './MeshGraph'; function TopologyView() { const { result: { meshData }, - error: fetchInitialError, + // error: fetchInitialError, request: fetchMeshVisualizer, } = useRequest( useCallback(async () => { From 5856f805fc657b1eb91ad805dbbf9a4ac93406d7 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 20 Jan 2022 13:45:03 -0800 Subject: [PATCH 006/125] Add debounce to resize event; link to node details. --- awx/ui/src/api/models/Instances.js | 5 ++++ awx/ui/src/screens/TopologyView/MeshGraph.js | 31 +++++++++++++------- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/awx/ui/src/api/models/Instances.js b/awx/ui/src/api/models/Instances.js index 78ea59d1dd..07ee085c14 100644 --- a/awx/ui/src/api/models/Instances.js +++ b/awx/ui/src/api/models/Instances.js @@ -7,6 +7,7 @@ class Instances extends Base { this.readHealthCheckDetail = this.readHealthCheckDetail.bind(this); this.healthCheck = this.healthCheck.bind(this); + this.readInstanceGroup = this.readInstanceGroup.bind(this); } healthCheck(instanceId) { @@ -16,6 +17,10 @@ class Instances extends Base { readHealthCheckDetail(instanceId) { return this.http.get(`${this.baseUrl}${instanceId}/health_check/`); } + + readInstanceGroup(instanceId) { + return this.http.get(`${this.baseUrl}${instanceId}/instance_groups/`); + } } export default Instances; diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js 
b/awx/ui/src/screens/TopologyView/MeshGraph.js index 77ca7656f4..3876e1e3ea 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -1,5 +1,8 @@ import React, { useEffect, useCallback } from 'react'; +import debounce from 'util/debounce'; +import { useHistory } from 'react-router-dom'; // import { t } from '@lingui/macro'; +import { InstancesAPI } from 'api'; import * as d3 from 'd3'; function MeshGraph({ data }) { @@ -97,7 +100,7 @@ function MeshGraph({ data }) { // { source: 'aaph3.local', target: 'aaph2.local' }, // ], // }; - + const history = useHistory(); const draw = useCallback(() => { const margin = 80; const getWidth = () => { @@ -191,6 +194,11 @@ function MeshGraph({ data }) { .on('mouseleave', (_, d) => { deselectSiblings(d); tooltip.html(``).style('visibility', 'hidden'); + }) + .on('click', (_, d) => { + if (d.node_type !== 'hop') { + redirectToDetailsPage(d); + } }); // health rings on nodes @@ -353,16 +361,17 @@ function MeshGraph({ data }) { .attr('r', defaultRadius); }); } - // const zoom = d3 - // .zoom() - // .scaleExtent([1, 8]) - // .on('zoom', function (event) { - // svg.selectAll('.links, .nodes').attr('transform', event.transform); - // }); - // svg.call(zoom); - // node.call(zoom); - // link.call(zoom); + async function redirectToDetailsPage({ id: nodeId }) { + const { + data: { results }, + } = await InstancesAPI.readInstanceGroup(nodeId); + const { id: instanceGroupId } = results[0]; + const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; + history.push(constructedURL); + } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [data]); useEffect(() => { @@ -370,7 +379,7 @@ function MeshGraph({ data }) { draw(); } - window.addEventListener('resize', handleResize); + window.addEventListener('resize', debounce(handleResize, 500)); handleResize(); From 0c8c69f04a4e9eb7cba2d41f07b5f6d78d0eeee4 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 
20 Jan 2022 13:53:14 -0800 Subject: [PATCH 007/125] Add RBAC for /topology_view endpoint. --- awx/ui/src/routeConfig.js | 1 + 1 file changed, 1 insertion(+) diff --git a/awx/ui/src/routeConfig.js b/awx/ui/src/routeConfig.js index f945e9ea79..76bb2e39a5 100644 --- a/awx/ui/src/routeConfig.js +++ b/awx/ui/src/routeConfig.js @@ -185,6 +185,7 @@ function getRouteConfig(userProfile = {}) { deleteRoute('management_jobs'); if (userProfile?.isOrgAdmin) return routeConfig; deleteRoute('instance_groups'); + deleteRoute('topology_view'); if (!userProfile?.isNotificationAdmin) deleteRoute('notification_templates'); return routeConfig; From 07ccce9845c721928ac689cd75d2475f03d75f60 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 20 Jan 2022 13:58:35 -0800 Subject: [PATCH 008/125] Zoom in/out on entire SVG canvas. --- awx/ui/src/screens/TopologyView/MeshGraph.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 3876e1e3ea..fb4e9d4abb 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -126,7 +126,7 @@ function MeshGraph({ data }) { .zoom() .scaleExtent([1, 8]) .on('zoom', (event) => { - svg.selectAll('.links, .nodes').attr('transform', event.transform); + svg.attr('transform', event.transform); }); /* Add SVG */ From b8674a3f8c9b83b0fdd306567e63beb641ba37ca Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 20 Jan 2022 14:14:28 -0800 Subject: [PATCH 009/125] Use PF colors for nodes. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index fb4e9d4abb..90ba1ea2ed 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -141,7 +141,6 @@ function MeshGraph({ data }) { .attr('transform', `translate(${margin}, ${margin})`) .call(zoom); - const color = d3.scaleOrdinal(d3.schemeCategory10); const graph = data; const simulation = d3 @@ -215,7 +214,7 @@ function MeshGraph({ data }) { .attr('r', defaultRadius) .attr('class', (d) => d.node_type) .attr('class', (d) => `id-${d.id}`) - .attr('fill', (d) => color(d.node_type)) + .attr('fill', (d) => renderNodeColor(d.node_type)) .attr('stroke', 'white'); svg.call(expandGlow); @@ -233,7 +232,7 @@ function MeshGraph({ data }) { .attr('cy', (d, i) => 50 + i * 25) .attr('r', defaultRadius) .attr('class', (d) => d.node_type) - .style('fill', (d) => color(d.node_type)); + .style('fill', (d) => renderNodeColor(d.node_type)); // legend text svg @@ -319,6 +318,17 @@ function MeshGraph({ data }) { return colorKey[nodeState]; } + function renderNodeColor(nodeType) { + const colorKey = { + hop: '#C46100', + execution: '#F0AB00', + hybrid: '#0066CC', + control: '#005F60' + }; + + return colorKey[nodeType]; + } + function highlightSiblings(n) { setTimeout(() => { svg.selectAll(`id-${n.id}`).attr('r', highlightRadius); From 9fc92ccc52c4ee68f9dedd805047e5152ace4859 Mon Sep 17 00:00:00 2001 From: Tiago Date: Mon, 24 Jan 2022 18:17:04 -0300 Subject: [PATCH 010/125] add data-cy attr --- awx/ui/src/screens/TopologyView/MeshGraph.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 90ba1ea2ed..cbe505eecf 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ 
b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -161,11 +161,13 @@ function MeshGraph({ data }) { const link = svg .append('g') .attr('class', `links`) + .attr('data-cy', 'links') .selectAll('path') .data(graph.links) .enter() .append('path') .attr('class', (d, i) => `link-${i}`) + .attr('data-cy', (d) => `${d.source}-${d.target}`) .style('fill', 'none') .style('stroke', '#ccc') .style('stroke-width', '2px') @@ -177,6 +179,7 @@ function MeshGraph({ data }) { const node = svg .append('g') .attr('class', 'nodes') + .attr('data-cy', 'nodes') .selectAll('g') .data(graph.nodes) .enter() @@ -225,6 +228,7 @@ function MeshGraph({ data }) { .append('g') .selectAll('g') .attr('class', 'chart-legend') + .attr('data-cy', 'chart-legend') .data(graph.nodes) .enter() .append('circle') @@ -238,6 +242,7 @@ function MeshGraph({ data }) { svg .append('g') .attr('class', 'chart-text') + .attr('data-cy', 'chart-text') .selectAll('g') .data(graph.nodes) .enter() @@ -252,6 +257,7 @@ function MeshGraph({ data }) { .select('#chart') .append('div') .attr('class', 'd3-tooltip') + .attr('data-cy', 'd3-tooltip') .style('position', 'absolute') .style('top', '200px') .style('right', '40px') From cd54d560b3d138d3bab821e1dd9936d4496f6636 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 26 Jan 2022 09:19:53 -0800 Subject: [PATCH 011/125] Update layout; fix multiple renders happening on page load. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 252 +++++++++--------- .../src/screens/TopologyView/TopologyView.js | 21 +- 2 files changed, 139 insertions(+), 134 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index cbe505eecf..729ee43776 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -1,107 +1,89 @@ -import React, { useEffect, useCallback } from 'react'; +import React, { useCallback, useEffect } from 'react'; import debounce from 'util/debounce'; -import { useHistory } from 'react-router-dom'; // import { t } from '@lingui/macro'; -import { InstancesAPI } from 'api'; import * as d3 from 'd3'; -function MeshGraph({ data }) { - // function MeshGraph() { - // const data = { - // nodes: [ - // { - // hostname: 'aapc1.local', - // node_state: 'healthy', - // node_type: 'control', - // id: 1, - // }, - // { - // hostname: 'aapc2.local', - // node_type: 'control', - // node_state: 'disabled', - // id: 2, - // }, - // { - // hostname: 'aapc3.local', - // node_type: 'control', - // node_state: 'healthy', - // id: 3, - // }, - // { - // hostname: 'aape1.local', - // node_type: 'execution', - // node_state: 'error', - // id: 4, - // }, - // { - // hostname: 'aape2.local', - // node_type: 'execution', - // node_state: 'error', - // id: 5, - // }, - // { - // hostname: 'aape3.local', - // node_type: 'execution', - // node_state: 'healthy', - // id: 6, - // }, - // { - // hostname: 'aape4.local', - // node_type: 'execution', - // node_state: 'healthy', - // id: 7, - // }, - // { - // hostname: 'aaph1.local', - // node_type: 'hop', - // node_state: 'disabled', - // id: 8, - // }, - // { - // hostname: 'aaph2.local', - // node_type: 'hop', - // node_state: 'healthy', - // id: 9, - // }, - // { - // hostname: 'aaph3.local', - // node_type: 'hop', - // node_state: 'error', - // id: 10, - // }, - // ], - // links: [ - // { source: 'aapc1.local', 
target: 'aapc2.local' }, - // { source: 'aapc1.local', target: 'aapc3.local' }, - // { source: 'aapc1.local', target: 'aape1.local' }, - // { source: 'aapc1.local', target: 'aape2.local' }, - - // { source: 'aapc2.local', target: 'aapc3.local' }, - // { source: 'aapc2.local', target: 'aape1.local' }, - // { source: 'aapc2.local', target: 'aape2.local' }, - - // { source: 'aapc3.local', target: 'aape1.local' }, - // { source: 'aapc3.local', target: 'aape2.local' }, - - // { source: 'aape3.local', target: 'aaph1.local' }, - // { source: 'aape3.local', target: 'aaph2.local' }, - - // { source: 'aape4.local', target: 'aaph3.local' }, - - // { source: 'aaph1.local', target: 'aapc1.local' }, - // { source: 'aaph1.local', target: 'aapc2.local' }, - // { source: 'aaph1.local', target: 'aapc3.local' }, - - // { source: 'aaph2.local', target: 'aapc1.local' }, - // { source: 'aaph2.local', target: 'aapc2.local' }, - // { source: 'aaph2.local', target: 'aapc3.local' }, - - // { source: 'aaph3.local', target: 'aaph1.local' }, - // { source: 'aaph3.local', target: 'aaph2.local' }, - // ], - // }; - const history = useHistory(); +// function MeshGraph({ data }) { +function MeshGraph({ redirectToDetailsPage }) { const draw = useCallback(() => { + const data = { + nodes: [ + { + id: 1, + hostname: 'awx_1', + node_type: 'hybrid', + node_state: 'healthy', + }, + { + id: 3, + hostname: 'receptor-1', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 4, + hostname: 'receptor-2', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 2, + hostname: 'receptor-hop', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 5, + hostname: 'receptor-hop-1', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 6, + hostname: 'receptor-hop-2', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 7, + hostname: 'receptor-hop-3', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 8, + hostname: 'receptor-hop-4', + node_type: 'hop', + 
node_state: 'healthy', + }, + ], + links: [ + { + source: 'receptor-hop', + target: 'awx_1', + }, + { + source: 'receptor-1', + target: 'receptor-hop', + }, + { + source: 'receptor-2', + target: 'receptor-hop', + }, + { + source: 'receptor-hop-3', + target: 'receptor-hop', + }, + // { + // "source": "receptor-2", + // "target": "receptor-hop-1" + // }, + // { + // "source": "receptor-2", + // "target": "receptor-hop-2" + // } + ], + }; const margin = 80; const getWidth = () => { let width; @@ -124,7 +106,7 @@ function MeshGraph({ data }) { const zoom = d3 .zoom() - .scaleExtent([1, 8]) + // .scaleExtent([1, 8]) .on('zoom', (event) => { svg.attr('transform', event.transform); }); @@ -145,27 +127,41 @@ function MeshGraph({ data }) { const simulation = d3 .forceSimulation() + .force('charge', d3.forceManyBody(75).strength(-100)) .force( 'link', d3.forceLink().id((d) => d.hostname) ) - .force('charge', d3.forceManyBody().strength(-350)) - .force( - 'collide', - d3.forceCollide((d) => - d.node_type === 'execution' || d.node_type === 'hop' ? 75 : 100 - ) - ) + .force('collide', d3.forceCollide(80)) + .force('forceX', d3.forceX(0)) + .force('forceY', d3.forceY(0)) .force('center', d3.forceCenter(width / 2, height / 2)); + // const simulation = d3 + // .forceSimulation() + // .force( + // 'link', + // d3.forceLink().id((d) => d.hostname) + // ) + // .force('charge', d3.forceManyBody().strength(-350)) + // .force( + // 'collide', + // d3.forceCollide((d) => + // d.node_type === 'execution' || d.node_type === 'hop' ? 
75 : 100 + // ) + // ) + // .force('center', d3.forceCenter(width / 2, height / 2)); + const link = svg .append('g') .attr('class', `links`) .attr('data-cy', 'links') - .selectAll('path') + // .selectAll('path') + .selectAll('line') .data(graph.links) .enter() - .append('path') + .append('line') + // .append('path') .attr('class', (d, i) => `link-${i}`) .attr('data-cy', (d) => `${d.source}-${d.target}`) .style('fill', 'none') @@ -285,17 +281,22 @@ function MeshGraph({ data }) { simulation.force('link').links(graph.links); function ticked() { - link.attr('d', linkArc); + // link.attr('d', linkArc); + link + .attr('x1', (d) => d.source.x) + .attr('y1', (d) => d.source.y) + .attr('x2', (d) => d.target.x) + .attr('y2', (d) => d.target.y); node.attr('transform', (d) => `translate(${d.x},${d.y})`); } - function linkArc(d) { - const dx = d.target.x - d.source.x; - const dy = d.target.y - d.source.y; - const dr = Math.sqrt(dx * dx + dy * dy); - return `M${d.source.x},${d.source.y}A${dr},${dr} 0 0,1 ${d.target.x},${d.target.y}`; - } + // function linkArc(d) { + // const dx = d.target.x - d.source.x; + // const dy = d.target.y - d.source.y; + // const dr = Math.sqrt(dx * dx + dy * dy); + // return `M${d.source.x},${d.source.y}A${dr},${dr} 0 0,1 ${d.target.x},${d.target.y}`; + // } function contractGlow() { svg @@ -329,7 +330,7 @@ function MeshGraph({ data }) { hop: '#C46100', execution: '#F0AB00', hybrid: '#0066CC', - control: '#005F60' + control: '#005F60', }; return colorKey[nodeType]; @@ -377,18 +378,7 @@ function MeshGraph({ data }) { .attr('r', defaultRadius); }); } - - async function redirectToDetailsPage({ id: nodeId }) { - const { - data: { results }, - } = await InstancesAPI.readInstanceGroup(nodeId); - const { id: instanceGroupId } = results[0]; - const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; - history.push(constructedURL); - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [data]); + }, []); // 
eslint-disable-line react-hooks/exhaustive-deps useEffect(() => { function handleResize() { @@ -396,9 +386,7 @@ function MeshGraph({ data }) { } window.addEventListener('resize', debounce(handleResize, 500)); - - handleResize(); - + draw(); return () => window.removeEventListener('resize', handleResize); }, [draw]); diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index 2e62a01e0d..a3ee8ffab3 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -1,10 +1,11 @@ import React, { useEffect, useCallback } from 'react'; +import { useHistory } from 'react-router-dom'; import { t } from '@lingui/macro'; import ScreenHeader from 'components/ScreenHeader/ScreenHeader'; import { PageSection, Card, CardBody } from '@patternfly/react-core'; import useRequest from 'hooks/useRequest'; -import { MeshAPI } from 'api'; +import { MeshAPI, InstancesAPI } from 'api'; import MeshGraph from './MeshGraph'; function TopologyView() { @@ -21,6 +22,15 @@ function TopologyView() { }, []), { meshData: { nodes: [], links: [] } } ); + async function RedirectToDetailsPage({ id: nodeId }) { + const history = useHistory(); + const { + data: { results }, + } = await InstancesAPI.readInstanceGroup(nodeId); + const { id: instanceGroupId } = results[0]; + const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; + history.push(constructedURL); + } useEffect(() => { fetchMeshVisualizer(); }, [fetchMeshVisualizer]); @@ -30,7 +40,14 @@ function TopologyView() { - {meshData && } + + {meshData && ( + + )} + From 8090cd3032af05c410294db0a23d28bf8b4dcf6f Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 2 Feb 2022 16:00:07 -0800 Subject: [PATCH 012/125] WIP new mesh layout based on QE feedback. 
--- awx/ui/src/screens/TopologyView/Legend.js | 139 +++++ awx/ui/src/screens/TopologyView/MeshGraph.js | 506 +++++++++--------- awx/ui/src/screens/TopologyView/Tooltip.js | 107 ++++ .../src/screens/TopologyView/TopologyView.js | 23 +- 4 files changed, 500 insertions(+), 275 deletions(-) create mode 100644 awx/ui/src/screens/TopologyView/Legend.js create mode 100644 awx/ui/src/screens/TopologyView/Tooltip.js diff --git a/awx/ui/src/screens/TopologyView/Legend.js b/awx/ui/src/screens/TopologyView/Legend.js new file mode 100644 index 0000000000..3ecc5492d5 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/Legend.js @@ -0,0 +1,139 @@ +/* eslint-disable i18next/no-literal-string */ +import React from 'react'; +import styled from 'styled-components'; +import { + Button as PFButton, + DescriptionList as PFDescriptionList, + DescriptionListTerm, + DescriptionListGroup as PFDescriptionListGroup, + DescriptionListDescription as PFDescriptionListDescription, + Divider, + TextContent, + Text as PFText, + TextVariants, +} from '@patternfly/react-core'; + +import { + ExclamationIcon as PFExclamationIcon, + CheckIcon as PFCheckIcon, +} from '@patternfly/react-icons'; + +const Wrapper = styled.div` + position: absolute; + top: -20px; + left: 0; + padding: 10px; + width: 190px; +`; +const Button = styled(PFButton)` + width: 20px; + height: 20px; + border-radius: 10px; + padding: 0; + font-size: 11px; +`; +const DescriptionListDescription = styled(PFDescriptionListDescription)` + font-size: 11px; +`; +const ExclamationIcon = styled(PFExclamationIcon)` + fill: white; + margin-left: 2px; +`; +const CheckIcon = styled(PFCheckIcon)` + fill: white; + margin-left: 2px; +`; +const DescriptionList = styled(PFDescriptionList)` + gap: 7px; +`; +const DescriptionListGroup = styled(PFDescriptionListGroup)` + align-items: center; +`; +const Text = styled(PFText)` + margin: 10px 0 5px; +`; + +function Legend() { + return ( + + + + Legend + + + Node types + + + + + + + Control node + + + + + + + 
Execution node + + + + + + + Hybrid node + + + + + + Hop node + + + + Status types + + + + + + + + {nodeDetail.hostname} + + + + Type + + {nodeDetail.node_type} node + + + + Status + + + + + + + )} + + ); +} + +export default Tooltip; diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index a3ee8ffab3..0d596cc5e4 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -1,15 +1,14 @@ import React, { useEffect, useCallback } from 'react'; -import { useHistory } from 'react-router-dom'; - import { t } from '@lingui/macro'; import ScreenHeader from 'components/ScreenHeader/ScreenHeader'; import { PageSection, Card, CardBody } from '@patternfly/react-core'; import useRequest from 'hooks/useRequest'; -import { MeshAPI, InstancesAPI } from 'api'; +import { MeshAPI } from 'api'; import MeshGraph from './MeshGraph'; function TopologyView() { const { + isLoading, result: { meshData }, // error: fetchInitialError, request: fetchMeshVisualizer, @@ -22,15 +21,6 @@ function TopologyView() { }, []), { meshData: { nodes: [], links: [] } } ); - async function RedirectToDetailsPage({ id: nodeId }) { - const history = useHistory(); - const { - data: { results }, - } = await InstancesAPI.readInstanceGroup(nodeId); - const { id: instanceGroupId } = results[0]; - const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; - history.push(constructedURL); - } useEffect(() => { fetchMeshVisualizer(); }, [fetchMeshVisualizer]); @@ -40,14 +30,7 @@ function TopologyView() { - - {meshData && ( - - )} - + {!isLoading && } From 3cfab418d1ee8f666bfd25bc4c39ab42d4029a09 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 2 Feb 2022 20:09:03 -0800 Subject: [PATCH 013/125] Fix zoom on scroll. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 327 +++++++++++++------ 1 file changed, 233 insertions(+), 94 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 399fc3d868..6644fae22a 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -7,96 +7,234 @@ import * as d3 from 'd3'; import Legend from './Legend'; import Tooltip from './Tooltip'; -function MeshGraph({ data }) { - // function MeshGraph() { +// function MeshGraph({ data }) { +function MeshGraph() { const [isNodeSelected, setIsNodeSelected] = useState(false); const [selectedNode, setSelectedNode] = useState(null); const [nodeDetail, setNodeDetail] = useState(null); const history = useHistory(); const draw = () => { - // const data = { - // nodes: [ - // { - // id: 1, - // hostname: 'awx_1', - // node_type: 'hybrid', - // node_state: 'healthy', - // }, - // { - // id: 3, - // hostname: 'receptor-1', - // node_type: 'execution', - // node_state: 'healthy', - // }, - // { - // id: 4, - // hostname: 'receptor-2', - // node_type: 'execution', - // node_state: 'healthy', - // }, - // { - // id: 2, - // hostname: 'receptor-hop', - // node_type: 'hop', - // node_state: 'healthy', - // }, - // { - // id: 5, - // hostname: 'receptor-hop-1', - // node_type: 'hop', - // node_state: 'healthy', - // }, - // { - // id: 6, - // hostname: 'receptor-hop-2', - // node_type: 'hop', - // node_state: 'disabled', - // }, - // { - // id: 7, - // hostname: 'receptor-hop-3', - // node_type: 'hop', - // node_state: 'error', - // }, - // { - // id: 8, - // hostname: 'receptor-hop-4', - // node_type: 'hop', - // node_state: 'healthy', - // }, - // ], - // links: [ - // { - // source: 'receptor-hop', - // target: 'awx_1', - // }, - // { - // source: 'receptor-1', - // target: 'receptor-hop', - // }, - // { - // source: 'receptor-2', - // target: 'receptor-hop', - // }, - // { - // source: 
'receptor-hop-3', - // target: 'receptor-hop', - // }, - // // { - // // "source": "receptor-hop", - // // "target": "receptor-hop-1" - // // }, - // // { - // // "source": "receptor-1", - // // "target": "receptor-hop-2" - // // }, - // // { - // // "source": "awx_1", - // // "target": "receptor-hop-4" - // // } - // ], - // }; + const data = { + nodes: [ + { + id: 1, + hostname: 'awx_1', + node_type: 'hybrid', + node_state: 'healthy', + }, + { + id: 3, + hostname: 'receptor-1', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 4, + hostname: 'receptor-2', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 2, + hostname: 'receptor-hop', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 5, + hostname: 'receptor-hop-1', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 6, + hostname: 'receptor-hop-2', + node_type: 'hop', + node_state: 'disabled', + }, + { + id: 7, + hostname: 'receptor-hop-3', + node_type: 'hop', + node_state: 'error', + }, + { + id: 8, + hostname: 'receptor-hop-4', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 9, + hostname: 'receptor-hop-5', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 10, + hostname: 'receptor-hop-5', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 11, + hostname: 'receptor-hop-6', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 12, + hostname: 'awx_1', + node_type: 'hybrid', + node_state: 'healthy', + }, + { + id: 13, + hostname: 'receptor-1', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 14, + hostname: 'receptor-2', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 1, + hostname: 'receptor-hop', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 5, + hostname: 'receptor-hop-1', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 6, + hostname: 'receptor-hop-2', + node_type: 'hop', + node_state: 'disabled', + }, + { + id: 7, + hostname: 'receptor-hop-3', + node_type: 'hop', 
+ node_state: 'error', + }, + { + id: 8, + hostname: 'receptor-hop-4', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 9, + hostname: 'receptor-hop-5', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 10, + hostname: 'receptor-hop-5', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 11, + hostname: 'receptor-hop-6', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 14, + hostname: 'receptor-2', + node_type: 'execution', + node_state: 'healthy', + }, + { + id: 1, + hostname: 'receptor-hop', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 5, + hostname: 'receptor-hop-1', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 6, + hostname: 'receptor-hop-2', + node_type: 'hop', + node_state: 'disabled', + }, + { + id: 7, + hostname: 'receptor-hop-3', + node_type: 'hop', + node_state: 'error', + }, + { + id: 8, + hostname: 'receptor-hop-4', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 9, + hostname: 'receptor-hop-5', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 10, + hostname: 'receptor-hop-5', + node_type: 'hop', + node_state: 'healthy', + }, + { + id: 11, + hostname: 'receptor-hop-6', + node_type: 'hop', + node_state: 'healthy', + }, + ], + links: [ + { + source: 'receptor-hop', + target: 'awx_1', + }, + { + source: 'receptor-1', + target: 'receptor-hop', + }, + { + source: 'receptor-2', + target: 'receptor-hop', + }, + { + source: 'receptor-hop-3', + target: 'receptor-hop', + }, + // { + // "source": "receptor-hop", + // "target": "receptor-hop-1" + // }, + // { + // "source": "receptor-1", + // "target": "receptor-hop-2" + // }, + // { + // "source": "awx_1", + // "target": "receptor-hop-4" + // } + ], + }; const margin = 15; const defaultRadius = 16; const defaultCollisionFactor = 80; @@ -126,13 +264,7 @@ function MeshGraph({ data }) { return width; }; const width = getWidth(); - - // const zoom = d3 - // .zoom() - // // .scaleExtent([1, 8]) - // .on('zoom', (event) => { - 
// svg.attr('transform', event.transform); - // }); + const zoom = d3.zoom().scaleExtent([-40, 40]).on('zoom', zoomed); /* Add SVG */ d3.selectAll(`#chart > svg`).remove(); @@ -142,9 +274,10 @@ function MeshGraph({ data }) { .append('svg') .attr('width', `${width + margin}px`) .attr('height', `${height + margin}px`) + .attr('viewBox', [0, 0, width, height]); + const mesh = svg .append('g') .attr('transform', `translate(${margin}, ${margin})`); - // .call(zoom); const graph = data; @@ -163,7 +296,7 @@ function MeshGraph({ data }) { .force('forceY', d3.forceY(defaultForceY)) .force('center', d3.forceCenter(width / 2, height / 2)); - const link = svg + const link = mesh .append('g') .attr('class', `links`) .attr('data-cy', 'links') @@ -181,7 +314,7 @@ function MeshGraph({ data }) { d3.select(this).transition().style('cursor', 'pointer'); }); - const node = svg + const node = mesh .append('g') .attr('class', 'nodes') .attr('data-cy', 'nodes') @@ -263,6 +396,8 @@ function MeshGraph({ data }) { node.attr('transform', (d) => `translate(${d.x},${d.y})`); } + svg.call(zoom); + function renderStateColor(nodeState) { const colorKey = { disabled: '#6A6E73', @@ -340,6 +475,10 @@ function MeshGraph({ data }) { setIsNodeSelected(true); setSelectedNode(n); } + + function zoomed({ transform }) { + mesh.attr('transform', transform); + } }; async function redirectToDetailsPage() { From 7378952a8b73694e2bbe5e99cc0e66dedacc6f2b Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 3 Feb 2022 08:46:04 -0800 Subject: [PATCH 014/125] Add opaque bg to tooltip and legend. 
--- awx/ui/src/screens/TopologyView/Legend.js | 5 +++-- awx/ui/src/screens/TopologyView/Tooltip.js | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/Legend.js b/awx/ui/src/screens/TopologyView/Legend.js index 3ecc5492d5..29e0d0516b 100644 --- a/awx/ui/src/screens/TopologyView/Legend.js +++ b/awx/ui/src/screens/TopologyView/Legend.js @@ -23,7 +23,8 @@ const Wrapper = styled.div` top: -20px; left: 0; padding: 10px; - width: 190px; + width: 150px; + background-color: rgba(255, 255, 255, 0.85); `; const Button = styled(PFButton)` width: 20px; @@ -88,7 +89,7 @@ function Legend() { Hybrid node diff --git a/awx/ui/src/screens/TopologyView/Tooltip.js b/awx/ui/src/screens/TopologyView/Tooltip.js index 2026525260..f294e5a1ed 100644 --- a/awx/ui/src/screens/TopologyView/Tooltip.js +++ b/awx/ui/src/screens/TopologyView/Tooltip.js @@ -24,6 +24,7 @@ const Wrapper = styled.div` right: 0; padding: 10px; width: 20%; + background-color: rgba(255, 255, 255, 0.85); `; const Button = styled(PFButton)` width: 20px; From f3474f081150a3f889e915d3e8eabaee74fc83b6 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 3 Feb 2022 09:32:23 -0800 Subject: [PATCH 015/125] Add legend toggle to header. 
--- awx/ui/src/screens/TopologyView/Header.js | 53 +++++++++++++++++++ awx/ui/src/screens/TopologyView/MeshGraph.js | 4 +- .../src/screens/TopologyView/TopologyView.js | 18 +++++-- 3 files changed, 68 insertions(+), 7 deletions(-) create mode 100644 awx/ui/src/screens/TopologyView/Header.js diff --git a/awx/ui/src/screens/TopologyView/Header.js b/awx/ui/src/screens/TopologyView/Header.js new file mode 100644 index 0000000000..096d79ce22 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/Header.js @@ -0,0 +1,53 @@ +import React from 'react'; +import PropTypes from 'prop-types'; + +import { t } from '@lingui/macro'; +import { + PageSection, + PageSectionVariants, + Switch, + Title, + Tooltip, +} from '@patternfly/react-core'; + +const Header = ({ title, handleSwitchToggle, toggleState }) => { + const { light } = PageSectionVariants; + + return ( + +
+
+ + {title} + +
+
+ + handleSwitchToggle(!toggleState)} + /> + +
+
+
+ ); +}; + +Header.propTypes = { + title: PropTypes.string.isRequired, +}; + +export default Header; diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 6644fae22a..d9b4a98a91 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -8,7 +8,7 @@ import Legend from './Legend'; import Tooltip from './Tooltip'; // function MeshGraph({ data }) { -function MeshGraph() { +function MeshGraph({ showLegend }) { const [isNodeSelected, setIsNodeSelected] = useState(false); const [selectedNode, setSelectedNode] = useState(null); const [nodeDetail, setNodeDetail] = useState(null); @@ -517,7 +517,7 @@ function MeshGraph() { return (
- + {showLegend && } - - +
- {!isLoading && } + + {!isLoading && ( + + )} + From afebcc574d2ecdb1e2d9c339e9aa25fe29cdb8ca Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 3 Feb 2022 10:28:27 -0800 Subject: [PATCH 016/125] Add icons to header; randomly generate data. --- awx/ui/src/screens/TopologyView/Header.js | 37 +++ awx/ui/src/screens/TopologyView/MeshGraph.js | 259 +++---------------- 2 files changed, 76 insertions(+), 220 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/Header.js b/awx/ui/src/screens/TopologyView/Header.js index 096d79ce22..ed94f2d295 100644 --- a/awx/ui/src/screens/TopologyView/Header.js +++ b/awx/ui/src/screens/TopologyView/Header.js @@ -3,6 +3,7 @@ import PropTypes from 'prop-types'; import { t } from '@lingui/macro'; import { + Button, PageSection, PageSectionVariants, Switch, @@ -10,6 +11,12 @@ import { Tooltip, } from '@patternfly/react-core'; +import { + SearchMinusIcon, + SearchPlusIcon, + ExpandArrowsAltIcon, +} from '@patternfly/react-icons'; + const Header = ({ title, handleSwitchToggle, toggleState }) => { const { light } = PageSectionVariants; @@ -32,6 +39,36 @@ const Header = ({ title, handleSwitchToggle, toggleState }) => {
+ + + + + + + + + { + for (let i = 0; i < r; i++) { + const link = { + source: n[getRandomInt(0, n.length - 1)].hostname, + target: n[getRandomInt(0, n.length - 1)].hostname, + }; + links.push(link); + } + return { nodes: n, links }; + }; + const generateNodes = (n) => { + function getRandomType() { + return ['hybrid', 'execution', 'control', 'hop'][getRandomInt(0, 3)]; + } + function getRandomState() { + return ['healthy', 'error', 'disabled'][getRandomInt(0, 2)]; + } + for (let i = 0; i < n; i++) { + const id = i + 1; + const randomType = getRandomType(); + const randomState = getRandomState(); + const node = { + id, + hostname: `node-${id}`, + node_type: randomType, + node_state: randomState, + }; + nodes.push(node); + } + return generateLinks(nodes, getRandomInt(1, n - 1)); + }; + const data = generateNodes(getRandomInt(5, 30)); const draw = () => { - const data = { - nodes: [ - { - id: 1, - hostname: 'awx_1', - node_type: 'hybrid', - node_state: 'healthy', - }, - { - id: 3, - hostname: 'receptor-1', - node_type: 'execution', - node_state: 'healthy', - }, - { - id: 4, - hostname: 'receptor-2', - node_type: 'execution', - node_state: 'healthy', - }, - { - id: 2, - hostname: 'receptor-hop', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 5, - hostname: 'receptor-hop-1', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 6, - hostname: 'receptor-hop-2', - node_type: 'hop', - node_state: 'disabled', - }, - { - id: 7, - hostname: 'receptor-hop-3', - node_type: 'hop', - node_state: 'error', - }, - { - id: 8, - hostname: 'receptor-hop-4', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 9, - hostname: 'receptor-hop-5', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 10, - hostname: 'receptor-hop-5', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 11, - hostname: 'receptor-hop-6', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 12, - hostname: 'awx_1', - node_type: 'hybrid', - node_state: 'healthy', - 
}, - { - id: 13, - hostname: 'receptor-1', - node_type: 'execution', - node_state: 'healthy', - }, - { - id: 14, - hostname: 'receptor-2', - node_type: 'execution', - node_state: 'healthy', - }, - { - id: 1, - hostname: 'receptor-hop', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 5, - hostname: 'receptor-hop-1', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 6, - hostname: 'receptor-hop-2', - node_type: 'hop', - node_state: 'disabled', - }, - { - id: 7, - hostname: 'receptor-hop-3', - node_type: 'hop', - node_state: 'error', - }, - { - id: 8, - hostname: 'receptor-hop-4', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 9, - hostname: 'receptor-hop-5', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 10, - hostname: 'receptor-hop-5', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 11, - hostname: 'receptor-hop-6', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 14, - hostname: 'receptor-2', - node_type: 'execution', - node_state: 'healthy', - }, - { - id: 1, - hostname: 'receptor-hop', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 5, - hostname: 'receptor-hop-1', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 6, - hostname: 'receptor-hop-2', - node_type: 'hop', - node_state: 'disabled', - }, - { - id: 7, - hostname: 'receptor-hop-3', - node_type: 'hop', - node_state: 'error', - }, - { - id: 8, - hostname: 'receptor-hop-4', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 9, - hostname: 'receptor-hop-5', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 10, - hostname: 'receptor-hop-5', - node_type: 'hop', - node_state: 'healthy', - }, - { - id: 11, - hostname: 'receptor-hop-6', - node_type: 'hop', - node_state: 'healthy', - }, - ], - links: [ - { - source: 'receptor-hop', - target: 'awx_1', - }, - { - source: 'receptor-1', - target: 'receptor-hop', - }, - { - source: 'receptor-2', - target: 'receptor-hop', - }, - { - source: 'receptor-hop-3', - 
target: 'receptor-hop', - }, - // { - // "source": "receptor-hop", - // "target": "receptor-hop-1" - // }, - // { - // "source": "receptor-1", - // "target": "receptor-hop-2" - // }, - // { - // "source": "awx_1", - // "target": "receptor-hop-4" - // } - ], - }; const margin = 15; const defaultRadius = 16; const defaultCollisionFactor = 80; From cf459dc4e8a73f9317d493a0cd3e6ef053bffeb0 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Fri, 4 Feb 2022 07:58:52 -0800 Subject: [PATCH 017/125] Remove placeholder label text. --- awx/ui/src/screens/TopologyView/MeshGraph.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 15d4f70b7f..f971807121 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -19,8 +19,8 @@ function MeshGraph({ showLegend }) { max = Math.floor(max); return Math.floor(Math.random() * (max - min + 1)) + min; } - let nodes = []; - let links = []; + const nodes = []; + const links = []; const generateLinks = (n, r) => { for (let i = 0; i < r; i++) { const link = { @@ -175,6 +175,7 @@ function MeshGraph({ showLegend }) { hostNames .append('text') .text((d) => renderLabelText(d.node_state, d.hostname)) + .attr('class', 'placeholder') .attr('fill', defaultNodeLabelColor) .attr('text-anchor', 'middle') .attr('y', 40) @@ -192,7 +193,7 @@ function MeshGraph({ showLegend }) { .attr('ry', 8) .style('fill', (d) => renderStateColor(d.node_state)); }); - + svg.selectAll('text.placeholder').remove(); hostNames .append('text') .text((d) => renderLabelText(d.node_state, d.hostname)) From 04a550cc67de82b844eb7d01af8defbf096a64b1 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Sun, 6 Feb 2022 11:33:04 -0800 Subject: [PATCH 018/125] Hook up zoom in, zoom out buttons. 
--- awx/ui/src/screens/TopologyView/Header.js | 11 +++++++++-- awx/ui/src/screens/TopologyView/MeshGraph.js | 9 +++------ awx/ui/src/screens/TopologyView/TopologyView.js | 16 +++++++++++++++- 3 files changed, 27 insertions(+), 9 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/Header.js b/awx/ui/src/screens/TopologyView/Header.js index ed94f2d295..4dacc7b598 100644 --- a/awx/ui/src/screens/TopologyView/Header.js +++ b/awx/ui/src/screens/TopologyView/Header.js @@ -17,9 +17,14 @@ import { ExpandArrowsAltIcon, } from '@patternfly/react-icons'; -const Header = ({ title, handleSwitchToggle, toggleState }) => { +const Header = ({ + title, + handleSwitchToggle, + toggleState, + zoomIn, + zoomOut, +}) => { const { light } = PageSectionVariants; - return (
{ aria-label={t`Zoom in`} variant="plain" icon={} + onClick={zoomIn} > @@ -55,6 +61,7 @@ const Header = ({ title, handleSwitchToggle, toggleState }) => { aria-label={t`Zoom out`} variant="plain" icon={} + onClick={zoomOut} > diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index f971807121..ab808f2a50 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -8,7 +8,7 @@ import Legend from './Legend'; import Tooltip from './Tooltip'; // function MeshGraph({ data }) { -function MeshGraph({ showLegend }) { +function MeshGraph({ showLegend, zoom }) { const [isNodeSelected, setIsNodeSelected] = useState(false); const [selectedNode, setSelectedNode] = useState(null); const [nodeDetail, setNodeDetail] = useState(null); @@ -83,7 +83,6 @@ function MeshGraph({ showLegend }) { return width; }; const width = getWidth(); - const zoom = d3.zoom().scaleExtent([-40, 40]).on('zoom', zoomed); /* Add SVG */ d3.selectAll(`#chart > svg`).remove(); @@ -91,11 +90,13 @@ function MeshGraph({ showLegend }) { const svg = d3 .select('#chart') .append('svg') + .attr('class', 'mesh-svg') .attr('width', `${width + margin}px`) .attr('height', `${height + margin}px`) .attr('viewBox', [0, 0, width, height]); const mesh = svg .append('g') + .attr('class', 'mesh') .attr('transform', `translate(${margin}, ${margin})`); const graph = data; @@ -295,10 +296,6 @@ function MeshGraph({ showLegend }) { setIsNodeSelected(true); setSelectedNode(n); } - - function zoomed({ transform }) { - mesh.attr('transform', transform); - } }; async function redirectToDetailsPage() { diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index 1fde272709..ac36360669 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -1,4 +1,5 @@ import React, { useEffect, useCallback, useState } from 'react'; 
+import * as d3 from 'd3'; import { t } from '@lingui/macro'; import { PageSection, Card, CardBody } from '@patternfly/react-core'; import useRequest from 'hooks/useRequest'; @@ -25,18 +26,31 @@ function TopologyView() { useEffect(() => { fetchMeshVisualizer(); }, [fetchMeshVisualizer]); + + const zoom = d3.zoom().on('zoom', ({ transform }) => { + d3.select('.mesh').attr('transform', transform); + }); + const zoomIn = () => { + d3.select('.mesh-svg').transition().call(zoom.scaleBy, 2); + }; + const zoomOut = () => { + d3.select('.mesh-svg').transition().call(zoom.scaleBy, 0.5); + }; + return ( <>
{!isLoading && ( - + )} From 391907c41ea38080eed4e6f3a3efcac70bb39807 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Sun, 6 Feb 2022 11:41:59 -0800 Subject: [PATCH 019/125] Add reset zoom button. --- awx/ui/src/screens/TopologyView/Header.js | 13 +++++++++++++ awx/ui/src/screens/TopologyView/TopologyView.js | 10 ++++++++++ 2 files changed, 23 insertions(+) diff --git a/awx/ui/src/screens/TopologyView/Header.js b/awx/ui/src/screens/TopologyView/Header.js index 4dacc7b598..e1b8f6648d 100644 --- a/awx/ui/src/screens/TopologyView/Header.js +++ b/awx/ui/src/screens/TopologyView/Header.js @@ -15,6 +15,7 @@ import { SearchMinusIcon, SearchPlusIcon, ExpandArrowsAltIcon, + ExpandIcon, } from '@patternfly/react-icons'; const Header = ({ @@ -23,6 +24,7 @@ const Header = ({ toggleState, zoomIn, zoomOut, + resetZoom, }) => { const { light } = PageSectionVariants; return ( @@ -76,6 +78,17 @@ const Header = ({ + + + { d3.select('.mesh-svg').transition().call(zoom.scaleBy, 0.5); }; + const resetZoom = () => { + const margin = 15; + const width = parseInt(d3.select(`#chart`).style('width'), 10) - margin; + d3.select('.mesh-svg').transition().duration(750).call( + zoom.transform, + d3.zoomIdentity, + d3.zoomTransform(d3.select('.mesh-svg').node()).invert([width / 2, 600 / 2]) + ); + } return ( <> @@ -45,6 +54,7 @@ function TopologyView() { toggleState={showLegend} zoomIn={zoomIn} zoomOut={zoomOut} + resetZoom={resetZoom} /> From b859c3360de09ff6ca9355c62c7ab9bebc453902 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Mon, 7 Feb 2022 09:12:49 -0800 Subject: [PATCH 020/125] Add zoom to fit. 
--- awx/ui/src/screens/TopologyView/Header.js | 2 + awx/ui/src/screens/TopologyView/MeshGraph.js | 2 +- .../src/screens/TopologyView/TopologyView.js | 40 ++++++++++++++++--- 3 files changed, 37 insertions(+), 7 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/Header.js b/awx/ui/src/screens/TopologyView/Header.js index e1b8f6648d..4929c8ceb1 100644 --- a/awx/ui/src/screens/TopologyView/Header.js +++ b/awx/ui/src/screens/TopologyView/Header.js @@ -25,6 +25,7 @@ const Header = ({ zoomIn, zoomOut, resetZoom, + zoomFit, }) => { const { light } = PageSectionVariants; return ( @@ -74,6 +75,7 @@ const Header = ({ aria-label={t`Fit to screen`} variant="plain" icon={} + onClick={zoomFit} > diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index ab808f2a50..a94c805340 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -52,7 +52,7 @@ function MeshGraph({ showLegend, zoom }) { } return generateLinks(nodes, getRandomInt(1, n - 1)); }; - const data = generateNodes(getRandomInt(5, 30)); + const data = generateNodes(getRandomInt(250, 250)); const draw = () => { const margin = 15; const defaultRadius = 16; diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index bad237100b..21c52537ba 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -38,13 +38,40 @@ function TopologyView() { }; const resetZoom = () => { const margin = 15; + const height = 600; const width = parseInt(d3.select(`#chart`).style('width'), 10) - margin; - d3.select('.mesh-svg').transition().duration(750).call( - zoom.transform, - d3.zoomIdentity, - d3.zoomTransform(d3.select('.mesh-svg').node()).invert([width / 2, 600 / 2]) - ); - } + d3.select('.mesh-svg') + .transition() + .duration(750) + .call( + zoom.transform, + d3.zoomIdentity, + d3 + 
.zoomTransform(d3.select('.mesh-svg').node()) + .invert([width / 2, height / 2]) + ); + }; + + const zoomFit = () => { + const bounds = d3.select('.mesh').node().getBBox(); + const parent = d3.select('.mesh').node().parentElement; + const fullWidth = parent.clientWidth; + const fullHeight = parent.clientHeight; + const { width, height } = bounds; + const midX = bounds.x + width / 2; + const midY = bounds.y + height / 2; + if (width === 0 || height === 0) return; // nothing to fit + const scale = 0.8 / Math.max(width / fullWidth, height / fullHeight); + const translate = [ + fullWidth / 2 - scale * midX, + fullHeight / 2 - scale * midY, + ]; + const [x, y] = translate; + d3.select('.mesh-svg') + .transition() + .duration(750) + .call(zoom.transform, d3.zoomIdentity.translate(x, y).scale(scale)); + }; return ( <> @@ -54,6 +81,7 @@ function TopologyView() { toggleState={showLegend} zoomIn={zoomIn} zoomOut={zoomOut} + zoomFit={zoomFit} resetZoom={resetZoom} /> From a6bc0d42229d858f563b40014fb1f54eb36fcba7 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 8 Feb 2022 20:13:46 -0800 Subject: [PATCH 021/125] Add loading screen. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index a94c805340..71cc0fe38e 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -1,12 +1,20 @@ import React, { useEffect, useState } from 'react'; import { useHistory } from 'react-router-dom'; +import styled from 'styled-components'; import { InstancesAPI } from 'api'; import debounce from 'util/debounce'; // import { t } from '@lingui/macro'; import * as d3 from 'd3'; import Legend from './Legend'; import Tooltip from './Tooltip'; +import ContentLoading from '../../components/ContentLoading'; +const Loader = styled(ContentLoading)` + height: 100%; + position: absolute; + width: 100%; + background: white; +`; // function MeshGraph({ data }) { function MeshGraph({ showLegend, zoom }) { const [isNodeSelected, setIsNodeSelected] = useState(false); @@ -86,7 +94,6 @@ function MeshGraph({ showLegend, zoom }) { /* Add SVG */ d3.selectAll(`#chart > svg`).remove(); - const svg = d3 .select('#chart') .append('svg') @@ -208,6 +215,8 @@ function MeshGraph({ showLegend, zoom }) { function ticked() { // link.attr('d', linkArc); + d3.select('.simulation-loader').style('visibility', 'visible'); + link .attr('x1', (d) => d.source.x) .attr('y1', (d) => d.source.y) @@ -215,6 +224,9 @@ function MeshGraph({ showLegend, zoom }) { .attr('y2', (d) => d.target.y); node.attr('transform', (d) => `translate(${d.x},${d.y})`); + if (simulation.alpha() < simulation.alphaMin()) { + d3.select('.simulation-loader').style('visibility', 'hidden'); + } } svg.call(zoom); @@ -325,6 +337,7 @@ function MeshGraph({ showLegend, zoom }) { } useEffect(() => { function handleResize() { + d3.select('.simulation-loader').style('visibility', 'visible'); draw(); } window.addEventListener('resize', debounce(handleResize, 500)); @@ 
-341,6 +354,7 @@ function MeshGraph({ showLegend, zoom }) { nodeDetail={nodeDetail} redirectToDetailsPage={redirectToDetailsPage} /> +
); } From 4235bf67f8c2c5fc80c74d8a58d19e130ca783e7 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 8 Feb 2022 20:24:42 -0800 Subject: [PATCH 022/125] Truncate long host names in graph, show full name in tooltip. --- awx/ui/src/screens/TopologyView/MeshGraph.js | 4 +++- awx/ui/src/util/strings.js | 7 +++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 71cc0fe38e..546b3b44bc 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -8,6 +8,7 @@ import * as d3 from 'd3'; import Legend from './Legend'; import Tooltip from './Tooltip'; import ContentLoading from '../../components/ContentLoading'; +import { truncateString } from '../../util/strings'; const Loader = styled(ContentLoading)` height: 100%; @@ -75,6 +76,7 @@ function MeshGraph({ showLegend, zoom }) { const defaultNodeHighlightColor = '#16407C'; const defaultNodeLabelColor = 'white'; const defaultFontSize = '12px'; + const labelMaxLen = 15; const getWidth = () => { let width; // This is in an a try/catch due to an error from jest. 
@@ -245,7 +247,7 @@ function MeshGraph({ showLegend, zoom }) { healthy: '\u2713', error: '\u0021', }; - return `${stateKey[nodeState]} ${name}`; + return `${stateKey[nodeState]} ${truncateString(name, labelMaxLen)}`; } function renderNodeType(nodeType) { diff --git a/awx/ui/src/util/strings.js b/awx/ui/src/util/strings.js index 2eee9bbe96..9fd250e450 100644 --- a/awx/ui/src/util/strings.js +++ b/awx/ui/src/util/strings.js @@ -17,3 +17,10 @@ export const stringIsUUID = (value) => /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/gi.test( value ); + +export const truncateString = (str, num) => { + if (str.length <= num) { + return str; + } + return `${str.slice(0, num)}...`; +}; From 9854f8a6abd09ef50ad02269e9af004d41a6ad4b Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 9 Feb 2022 09:19:25 -0800 Subject: [PATCH 023/125] Use alpha decay percentage instead of absolute value for loading screen. --- awx/ui/src/screens/TopologyView/MeshGraph.js | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 546b3b44bc..a30d5785d2 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -226,9 +226,7 @@ function MeshGraph({ showLegend, zoom }) { .attr('y2', (d) => d.target.y); node.attr('transform', (d) => `translate(${d.x},${d.y})`); - if (simulation.alpha() < simulation.alphaMin()) { - d3.select('.simulation-loader').style('visibility', 'hidden'); - } + calculateAlphaDecay(simulation.alpha(), simulation.alphaMin(), 35); } svg.call(zoom); @@ -310,9 +308,17 @@ function MeshGraph({ showLegend, zoom }) { setIsNodeSelected(true); setSelectedNode(n); } + + function calculateAlphaDecay(a, aMin, x) { + const decayPercentage = Math.min((aMin / a) * 100); + if (decayPercentage >= x) { + d3.select('.simulation-loader').style('visibility', 'hidden'); + } + } }; async function 
redirectToDetailsPage() { + // TODO: redirect to top-level instances details page const { id: nodeId } = selectedNode; const { data: { results }, From d785f30c5fdecb201233a8aa4dfcd5a79f235df5 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 9 Feb 2022 09:19:50 -0800 Subject: [PATCH 024/125] Fix JSX errors. --- awx/ui/src/screens/TopologyView/Legend.js | 4 ++-- awx/ui/src/screens/TopologyView/Tooltip.js | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/Legend.js b/awx/ui/src/screens/TopologyView/Legend.js index 29e0d0516b..2d729e5737 100644 --- a/awx/ui/src/screens/TopologyView/Legend.js +++ b/awx/ui/src/screens/TopologyView/Legend.js @@ -56,11 +56,11 @@ const Text = styled(PFText)` function Legend() { return ( - + Legend diff --git a/awx/ui/src/screens/TopologyView/Tooltip.js b/awx/ui/src/screens/TopologyView/Tooltip.js index f294e5a1ed..142f345448 100644 --- a/awx/ui/src/screens/TopologyView/Tooltip.js +++ b/awx/ui/src/screens/TopologyView/Tooltip.js @@ -50,12 +50,12 @@ function Tooltip({ redirectToDetailsPage, }) { return ( - + {isNodeSelected === false ? ( Details From 272e0126269b0ac07db6f3a5a8282e71e7383831 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 9 Feb 2022 11:18:19 -0800 Subject: [PATCH 025/125] Add new loading screen placeholder. 
--- .../screens/TopologyView/ContentLoading.js | 41 +++++++++++++++++++ awx/ui/src/screens/TopologyView/MeshGraph.js | 2 +- 2 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 awx/ui/src/screens/TopologyView/ContentLoading.js diff --git a/awx/ui/src/screens/TopologyView/ContentLoading.js b/awx/ui/src/screens/TopologyView/ContentLoading.js new file mode 100644 index 0000000000..c92555e47d --- /dev/null +++ b/awx/ui/src/screens/TopologyView/ContentLoading.js @@ -0,0 +1,41 @@ +import React from 'react'; + +import styled from 'styled-components'; +import { + EmptyState as PFEmptyState, + EmptyStateIcon, + Text, + TextContent, + TextVariants, + Spinner, +} from '@patternfly/react-core'; + +import { TopologyIcon as PFTopologyIcon } from '@patternfly/react-icons'; + +const EmptyState = styled(PFEmptyState)` + --pf-c-empty-state--m-lg--MaxWidth: none; + min-height: 250px; +`; + +const TopologyIcon = styled(PFTopologyIcon)` + font-size: 3em; + fill: #6a6e73; +`; + +const ContentLoading = ({ className }) => ( + + + + + Please wait until the topology view is populated... + + + + +); + +export { ContentLoading as _ContentLoading }; +export default ContentLoading; diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index a30d5785d2..67ddf3680a 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -7,7 +7,7 @@ import debounce from 'util/debounce'; import * as d3 from 'd3'; import Legend from './Legend'; import Tooltip from './Tooltip'; -import ContentLoading from '../../components/ContentLoading'; +import ContentLoading from './ContentLoading'; import { truncateString } from '../../util/strings'; const Loader = styled(ContentLoading)` From 69a42b1a89c21c337eb938cfe33309d2422add81 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 10 Feb 2022 20:47:19 -0800 Subject: [PATCH 026/125] Some lint fixes; fix routesConfig unit test. 
--- awx/ui/src/routeConfig.test.js | 5 +++++ awx/ui/src/screens/TopologyView/ContentLoading.js | 3 ++- awx/ui/src/screens/TopologyView/MeshGraph.js | 4 ++-- awx/ui/src/screens/TopologyView/Tooltip.js | 2 +- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/awx/ui/src/routeConfig.test.js b/awx/ui/src/routeConfig.test.js index da0dc7e536..35e0a5eae3 100644 --- a/awx/ui/src/routeConfig.test.js +++ b/awx/ui/src/routeConfig.test.js @@ -43,6 +43,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', '/settings', ]); }); @@ -71,6 +72,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', '/settings', ]); }); @@ -98,6 +100,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', ]); }); @@ -233,6 +236,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', ]); }); @@ -263,6 +267,7 @@ describe('getRouteConfig', () => { '/instances', '/applications', '/execution_environments', + '/topology_view', ]); }); }); diff --git a/awx/ui/src/screens/TopologyView/ContentLoading.js b/awx/ui/src/screens/TopologyView/ContentLoading.js index c92555e47d..b137299c5d 100644 --- a/awx/ui/src/screens/TopologyView/ContentLoading.js +++ b/awx/ui/src/screens/TopologyView/ContentLoading.js @@ -1,4 +1,5 @@ import React from 'react'; +import { t } from '@lingui/macro'; import styled from 'styled-components'; import { @@ -30,7 +31,7 @@ const ContentLoading = ({ className }) => ( component={TextVariants.small} style={{ fontWeight: 'bold', color: 'black' }} > - Please wait until the topology view is populated... 
+ {t`Please wait until the topology view is populated...`} diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 67ddf3680a..4dc7488063 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -111,14 +111,14 @@ function MeshGraph({ showLegend, zoom }) { const graph = data; const simulation = d3 - .forceSimulation() + .forceSimulation(graph.nodes) .force( 'charge', d3.forceManyBody(defaultForceBody).strength(defaultForceStrength) ) .force( 'link', - d3.forceLink().id((d) => d.hostname) + d3.forceLink(graph.links).id((d) => d.hostname) ) .force('collide', d3.forceCollide(defaultCollisionFactor)) .force('forceX', d3.forceX(defaultForceX)) diff --git a/awx/ui/src/screens/TopologyView/Tooltip.js b/awx/ui/src/screens/TopologyView/Tooltip.js index 142f345448..f82a742158 100644 --- a/awx/ui/src/screens/TopologyView/Tooltip.js +++ b/awx/ui/src/screens/TopologyView/Tooltip.js @@ -69,7 +69,7 @@ function Tooltip({ Details From b1570302bc85f85afd3e3d9349bcceaf5e17024d Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Fri, 11 Feb 2022 14:02:37 -0800 Subject: [PATCH 027/125] Refactor: move constants and helper functions into their own files. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 196 ++++++------------ awx/ui/src/screens/TopologyView/Tooltip.js | 4 +- awx/ui/src/screens/TopologyView/constants.js | 38 ++++ .../src/screens/TopologyView/utils/helpers.js | 86 ++++++++ 4 files changed, 185 insertions(+), 139 deletions(-) create mode 100644 awx/ui/src/screens/TopologyView/constants.js create mode 100644 awx/ui/src/screens/TopologyView/utils/helpers.js diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 4dc7488063..3e949e6d05 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -1,14 +1,32 @@ import React, { useEffect, useState } from 'react'; import { useHistory } from 'react-router-dom'; import styled from 'styled-components'; -import { InstancesAPI } from 'api'; import debounce from 'util/debounce'; // import { t } from '@lingui/macro'; import * as d3 from 'd3'; import Legend from './Legend'; import Tooltip from './Tooltip'; import ContentLoading from './ContentLoading'; -import { truncateString } from '../../util/strings'; +import { + renderStateColor, + renderLabelText, + renderNodeType, + renderNodeIcon, + redirectToDetailsPage, + // generateRandomNodes, + // getRandomInt, +} from './utils/helpers'; +import { + MESH_FORCE_LAYOUT, + DEFAULT_RADIUS, + DEFAULT_NODE_COLOR, + DEFAULT_NODE_HIGHLIGHT_COLOR, + DEFAULT_NODE_LABEL_TEXT_COLOR, + DEFAULT_FONT_SIZE, + MARGIN, + HEIGHT, + FALLBACK_WIDTH, +} from './constants'; const Loader = styled(ContentLoading)` height: 100%; @@ -16,67 +34,15 @@ const Loader = styled(ContentLoading)` width: 100%; background: white; `; -// function MeshGraph({ data }) { -function MeshGraph({ showLegend, zoom }) { +function MeshGraph({ data, showLegend, zoom }) { + // function MeshGraph({ showLegend, zoom }) { const [isNodeSelected, setIsNodeSelected] = useState(false); const [selectedNode, setSelectedNode] = useState(null); const [nodeDetail, 
setNodeDetail] = useState(null); const history = useHistory(); - function getRandomInt(min, max) { - min = Math.ceil(min); - max = Math.floor(max); - return Math.floor(Math.random() * (max - min + 1)) + min; - } - const nodes = []; - const links = []; - const generateLinks = (n, r) => { - for (let i = 0; i < r; i++) { - const link = { - source: n[getRandomInt(0, n.length - 1)].hostname, - target: n[getRandomInt(0, n.length - 1)].hostname, - }; - links.push(link); - } - return { nodes: n, links }; - }; - const generateNodes = (n) => { - function getRandomType() { - return ['hybrid', 'execution', 'control', 'hop'][getRandomInt(0, 3)]; - } - function getRandomState() { - return ['healthy', 'error', 'disabled'][getRandomInt(0, 2)]; - } - for (let i = 0; i < n; i++) { - const id = i + 1; - const randomType = getRandomType(); - const randomState = getRandomState(); - const node = { - id, - hostname: `node-${id}`, - node_type: randomType, - node_state: randomState, - }; - nodes.push(node); - } - return generateLinks(nodes, getRandomInt(1, n - 1)); - }; - const data = generateNodes(getRandomInt(250, 250)); + // const data = generateRandomNodes(getRandomInt(4, 50)); const draw = () => { - const margin = 15; - const defaultRadius = 16; - const defaultCollisionFactor = 80; - const defaultForceStrength = -100; - const defaultForceBody = 75; - const defaultForceX = 0; - const defaultForceY = 0; - const height = 600; - const fallbackWidth = 700; - const defaultNodeColor = '#0066CC'; - const defaultNodeHighlightColor = '#16407C'; - const defaultNodeLabelColor = 'white'; - const defaultFontSize = '12px'; - const labelMaxLen = 15; const getWidth = () => { let width; // This is in an a try/catch due to an error from jest. 
@@ -84,10 +50,10 @@ function MeshGraph({ showLegend, zoom }) { // style function, it says it is null in the test try { width = - parseInt(d3.select(`#chart`).style('width'), 10) - margin || - fallbackWidth; + parseInt(d3.select(`#chart`).style('width'), 10) - MARGIN || + FALLBACK_WIDTH; } catch (error) { - width = fallbackWidth; + width = FALLBACK_WIDTH; } return width; @@ -100,13 +66,13 @@ function MeshGraph({ showLegend, zoom }) { .select('#chart') .append('svg') .attr('class', 'mesh-svg') - .attr('width', `${width + margin}px`) - .attr('height', `${height + margin}px`) - .attr('viewBox', [0, 0, width, height]); + .attr('width', `${width + MARGIN}px`) + .attr('height', `${HEIGHT + MARGIN}px`) + .attr('viewBox', [0, 0, width, HEIGHT]); const mesh = svg .append('g') .attr('class', 'mesh') - .attr('transform', `translate(${margin}, ${margin})`); + .attr('transform', `translate(${MARGIN}, ${MARGIN})`); const graph = data; @@ -114,16 +80,21 @@ function MeshGraph({ showLegend, zoom }) { .forceSimulation(graph.nodes) .force( 'charge', - d3.forceManyBody(defaultForceBody).strength(defaultForceStrength) + d3 + .forceManyBody(MESH_FORCE_LAYOUT.defaultForceBody) + .strength(MESH_FORCE_LAYOUT.defaultForceStrength) ) .force( 'link', d3.forceLink(graph.links).id((d) => d.hostname) ) - .force('collide', d3.forceCollide(defaultCollisionFactor)) - .force('forceX', d3.forceX(defaultForceX)) - .force('forceY', d3.forceY(defaultForceY)) - .force('center', d3.forceCenter(width / 2, height / 2)); + .force( + 'collide', + d3.forceCollide(MESH_FORCE_LAYOUT.defaultCollisionFactor) + ) + .force('forceX', d3.forceX(MESH_FORCE_LAYOUT.defaultForceX)) + .force('forceY', d3.forceY(MESH_FORCE_LAYOUT.defaultForceY)) + .force('center', d3.forceCenter(width / 2, HEIGHT / 2)); const link = mesh .append('g') @@ -133,7 +104,7 @@ function MeshGraph({ showLegend, zoom }) { .data(graph.links) .enter() .append('line') - .attr('class', (d, i) => `link-${i}`) + .attr('class', (_, i) => `link-${i}`) 
.attr('data-cy', (d) => `${d.source}-${d.target}`) .style('fill', 'none') .style('stroke', '#ccc') @@ -166,11 +137,11 @@ function MeshGraph({ showLegend, zoom }) { // node circles node .append('circle') - .attr('r', defaultRadius) + .attr('r', DEFAULT_RADIUS) .attr('class', (d) => d.node_type) .attr('class', (d) => `id-${d.id}`) - .attr('fill', defaultNodeColor) - .attr('stroke', defaultNodeLabelColor); + .attr('fill', DEFAULT_NODE_COLOR) + .attr('stroke', DEFAULT_NODE_LABEL_TEXT_COLOR); // node type labels node @@ -178,7 +149,7 @@ function MeshGraph({ showLegend, zoom }) { .text((d) => renderNodeType(d.node_type)) .attr('text-anchor', 'middle') .attr('alignment-baseline', 'central') - .attr('fill', defaultNodeLabelColor); + .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR); // node hostname labels const hostNames = node.append('g'); @@ -186,7 +157,7 @@ function MeshGraph({ showLegend, zoom }) { .append('text') .text((d) => renderLabelText(d.node_state, d.hostname)) .attr('class', 'placeholder') - .attr('fill', defaultNodeLabelColor) + .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR) .attr('text-anchor', 'middle') .attr('y', 40) .each(function calculateLabelWidth() { @@ -207,8 +178,8 @@ function MeshGraph({ showLegend, zoom }) { hostNames .append('text') .text((d) => renderLabelText(d.node_state, d.hostname)) - .attr('font-size', defaultFontSize) - .attr('fill', defaultNodeLabelColor) + .attr('font-size', DEFAULT_FONT_SIZE) + .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR) .attr('text-anchor', 'middle') .attr('y', 38); @@ -216,7 +187,6 @@ function MeshGraph({ showLegend, zoom }) { simulation.force('link').links(graph.links); function ticked() { - // link.attr('d', linkArc); d3.select('.simulation-loader').style('visibility', 'visible'); link @@ -226,42 +196,16 @@ function MeshGraph({ showLegend, zoom }) { .attr('y2', (d) => d.target.y); node.attr('transform', (d) => `translate(${d.x},${d.y})`); - calculateAlphaDecay(simulation.alpha(), simulation.alphaMin(), 35); + 
calculateAlphaDecay(simulation.alpha(), simulation.alphaMin(), 20); } svg.call(zoom); - function renderStateColor(nodeState) { - const colorKey = { - disabled: '#6A6E73', - healthy: '#3E8635', - error: '#C9190B', - }; - return colorKey[nodeState]; - } - function renderLabelText(nodeState, name) { - const stateKey = { - disabled: '\u25EF', - healthy: '\u2713', - error: '\u0021', - }; - return `${stateKey[nodeState]} ${truncateString(name, labelMaxLen)}`; - } - - function renderNodeType(nodeType) { - const typeKey = { - hop: 'h', - execution: 'Ex', - hybrid: 'Hy', - control: 'C', - }; - - return typeKey[nodeType]; - } - function highlightSiblings(n) { setTimeout(() => { - svg.select(`circle.id-${n.id}`).attr('fill', defaultNodeHighlightColor); + svg + .select(`circle.id-${n.id}`) + .attr('fill', DEFAULT_NODE_HIGHLIGHT_COLOR); const immediate = graph.links.filter( (l) => n.hostname === l.source.hostname || n.hostname === l.target.hostname @@ -277,7 +221,7 @@ function MeshGraph({ showLegend, zoom }) { } function deselectSiblings(n) { - svg.select(`circle.id-${n.id}`).attr('fill', defaultNodeColor); + svg.select(`circle.id-${n.id}`).attr('fill', DEFAULT_NODE_COLOR); const immediate = graph.links.filter( (l) => n.hostname === l.source.hostname || n.hostname === l.target.hostname @@ -317,35 +261,11 @@ function MeshGraph({ showLegend, zoom }) { } }; - async function redirectToDetailsPage() { - // TODO: redirect to top-level instances details page - const { id: nodeId } = selectedNode; - const { - data: { results }, - } = await InstancesAPI.readInstanceGroup(nodeId); - const { id: instanceGroupId } = results[0]; - const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; - history.push(constructedURL); - } - - function renderNodeIcon() { - if (selectedNode) { - const { node_type: nodeType } = selectedNode; - const typeKey = { - hop: 'h', - execution: 'Ex', - hybrid: 'Hy', - control: 'C', - }; - - return typeKey[nodeType]; - } - - return 
false; - } useEffect(() => { function handleResize() { d3.select('.simulation-loader').style('visibility', 'visible'); + setSelectedNode(null); + setIsNodeSelected(false); draw(); } window.addEventListener('resize', debounce(handleResize, 500)); @@ -358,9 +278,11 @@ function MeshGraph({ showLegend, zoom }) { {showLegend && } + redirectToDetailsPage(selectedNode, history) + } />
diff --git a/awx/ui/src/screens/TopologyView/Tooltip.js b/awx/ui/src/screens/TopologyView/Tooltip.js index f82a742158..34dae9c8a9 100644 --- a/awx/ui/src/screens/TopologyView/Tooltip.js +++ b/awx/ui/src/screens/TopologyView/Tooltip.js @@ -69,7 +69,7 @@ function Tooltip({ Details @@ -79,7 +79,7 @@ function Tooltip({ diff --git a/awx/ui/src/screens/TopologyView/constants.js b/awx/ui/src/screens/TopologyView/constants.js new file mode 100644 index 0000000000..642e2cadc7 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/constants.js @@ -0,0 +1,38 @@ +/* eslint-disable-next-line import/prefer-default-export */ +export const MESH_FORCE_LAYOUT = { + defaultCollisionFactor: 80, + defaultForceStrength: -100, + defaultForceBody: 75, + defaultForceX: 0, + defaultForceY: 0, +}; + +export const DEFAULT_RADIUS = 16; +export const DEFAULT_NODE_COLOR = '#0066CC'; +export const DEFAULT_NODE_HIGHLIGHT_COLOR = '#16407C'; +export const DEFAULT_NODE_LABEL_TEXT_COLOR = 'white'; +export const DEFAULT_FONT_SIZE = '12px'; +export const LABEL_TEXT_MAX_LENGTH = 15; + +export const MARGIN = 15; +export const HEIGHT = 600; +export const FALLBACK_WIDTH = 700; + +export const NODE_STATE_COLOR_KEY = { + disabled: '#6A6E73', + healthy: '#3E8635', + error: '#C9190B', +}; + +export const NODE_STATE_HTML_ENTITY_KEY = { + disabled: '\u25EF', + healthy: '\u2713', + error: '\u0021', +}; + +export const NODE_TYPE_SYMBOL_KEY = { + hop: 'h', + execution: 'Ex', + hybrid: 'Hy', + control: 'C', +}; diff --git a/awx/ui/src/screens/TopologyView/utils/helpers.js b/awx/ui/src/screens/TopologyView/utils/helpers.js new file mode 100644 index 0000000000..cbb6158c5b --- /dev/null +++ b/awx/ui/src/screens/TopologyView/utils/helpers.js @@ -0,0 +1,86 @@ +import { InstancesAPI } from 'api'; +import { truncateString } from '../../../util/strings'; + +import { + NODE_STATE_COLOR_KEY, + NODE_STATE_HTML_ENTITY_KEY, + NODE_TYPE_SYMBOL_KEY, + LABEL_TEXT_MAX_LENGTH, +} from '../constants'; + +export function 
renderStateColor(nodeState) { + return NODE_STATE_COLOR_KEY[nodeState]; +} + +export function renderLabelText(nodeState, name) { + return `${NODE_STATE_HTML_ENTITY_KEY[nodeState]} ${truncateString( + name, + LABEL_TEXT_MAX_LENGTH + )}`; +} + +export function renderNodeType(nodeType) { + return NODE_TYPE_SYMBOL_KEY[nodeType]; +} + +export function renderNodeIcon(selectedNode) { + if (selectedNode) { + const { node_type: nodeType } = selectedNode; + return NODE_TYPE_SYMBOL_KEY[nodeType]; + } + + return false; +} + +export async function redirectToDetailsPage(selectedNode, history) { + // TODO: redirect to top-level instances details page + const { id: nodeId } = selectedNode; + const { + data: { results }, + } = await InstancesAPI.readInstanceGroup(nodeId); + const { id: instanceGroupId } = results[0]; + const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; + history.push(constructedURL); +} + +// DEBUG TOOLS +export function getRandomInt(min, max) { + min = Math.ceil(min); + max = Math.floor(max); + return Math.floor(Math.random() * (max - min + 1)) + min; +} + +const generateRandomLinks = (n, r) => { + const links = []; + for (let i = 0; i < r; i++) { + const link = { + source: n[getRandomInt(0, n.length - 1)].hostname, + target: n[getRandomInt(0, n.length - 1)].hostname, + }; + links.push(link); + } + return { nodes: n, links }; +}; + +export const generateRandomNodes = (n) => { + const nodes = []; + function getRandomType() { + return ['hybrid', 'execution', 'control', 'hop'][getRandomInt(0, 3)]; + } + function getRandomState() { + return ['healthy', 'error', 'disabled'][getRandomInt(0, 2)]; + } + for (let i = 0; i < n; i++) { + const id = i + 1; + const randomType = getRandomType(); + const randomState = getRandomState(); + const node = { + id, + hostname: `node-${id}`, + node_type: randomType, + node_state: randomState, + }; + nodes.push(node); + } + return generateRandomLinks(nodes, getRandomInt(1, n - 1)); +}; From 
c102bf05af95b88b05fb56c800ba9e0684d83a1b Mon Sep 17 00:00:00 2001 From: kialam Date: Wed, 16 Feb 2022 14:55:37 -0800 Subject: [PATCH 028/125] Update awx/ui/src/screens/TopologyView/MeshGraph.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Tiago Góes --- awx/ui/src/screens/TopologyView/MeshGraph.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 3e949e6d05..d3114df877 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -105,7 +105,7 @@ function MeshGraph({ data, showLegend, zoom }) { .enter() .append('line') .attr('class', (_, i) => `link-${i}`) - .attr('data-cy', (d) => `${d.source}-${d.target}`) + .attr('data-cy', (d) => `${d.source.hostname}-${d.target.hostname}`) .style('fill', 'none') .style('stroke', '#ccc') .style('stroke-width', '2px') From af1845369127c523d1cc7552cfaba64145c67c52 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 17 Feb 2022 10:18:38 -0800 Subject: [PATCH 029/125] Use 100% height. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 38 +++++-------------- .../src/screens/TopologyView/TopologyView.js | 8 ++-- awx/ui/src/screens/TopologyView/constants.js | 2 +- .../src/screens/TopologyView/utils/helpers.js | 9 +++++ 4 files changed, 24 insertions(+), 33 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index d3114df877..df93bdeddb 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -13,6 +13,8 @@ import { renderNodeType, renderNodeIcon, redirectToDetailsPage, + getHeight, + getWidth, // generateRandomNodes, // getRandomInt, } from './utils/helpers'; @@ -23,9 +25,7 @@ import { DEFAULT_NODE_HIGHLIGHT_COLOR, DEFAULT_NODE_LABEL_TEXT_COLOR, DEFAULT_FONT_SIZE, - MARGIN, - HEIGHT, - FALLBACK_WIDTH, + SELECTOR, } from './constants'; const Loader = styled(ContentLoading)` @@ -43,22 +43,8 @@ function MeshGraph({ data, showLegend, zoom }) { // const data = generateRandomNodes(getRandomInt(4, 50)); const draw = () => { - const getWidth = () => { - let width; - // This is in an a try/catch due to an error from jest. 
- // Even though the d3.select returns a valid selector with - // style function, it says it is null in the test - try { - width = - parseInt(d3.select(`#chart`).style('width'), 10) - MARGIN || - FALLBACK_WIDTH; - } catch (error) { - width = FALLBACK_WIDTH; - } - - return width; - }; - const width = getWidth(); + const width = getWidth(SELECTOR); + const height = getHeight(SELECTOR); /* Add SVG */ d3.selectAll(`#chart > svg`).remove(); @@ -66,13 +52,9 @@ function MeshGraph({ data, showLegend, zoom }) { .select('#chart') .append('svg') .attr('class', 'mesh-svg') - .attr('width', `${width + MARGIN}px`) - .attr('height', `${HEIGHT + MARGIN}px`) - .attr('viewBox', [0, 0, width, HEIGHT]); - const mesh = svg - .append('g') - .attr('class', 'mesh') - .attr('transform', `translate(${MARGIN}, ${MARGIN})`); + .attr('width', `${width}px`) + .attr('height', `100%`); + const mesh = svg.append('g').attr('class', 'mesh'); const graph = data; @@ -94,7 +76,7 @@ function MeshGraph({ data, showLegend, zoom }) { ) .force('forceX', d3.forceX(MESH_FORCE_LAYOUT.defaultForceX)) .force('forceY', d3.forceY(MESH_FORCE_LAYOUT.defaultForceY)) - .force('center', d3.forceCenter(width / 2, HEIGHT / 2)); + .force('center', d3.forceCenter(width / 2, height / 2)); const link = mesh .append('g') @@ -274,7 +256,7 @@ function MeshGraph({ data, showLegend, zoom }) { }, []); // eslint-disable-line react-hooks/exhaustive-deps return ( -
+
{showLegend && } { - const margin = 15; - const height = 600; - const width = parseInt(d3.select(`#chart`).style('width'), 10) - margin; + const parent = d3.select('.mesh').node().parentElement; + const width = parent.clientWidth; + const height = parent.clientHeight; d3.select('.mesh-svg') .transition() .duration(750) @@ -85,7 +85,7 @@ function TopologyView() { resetZoom={resetZoom} /> - + {!isLoading && ( diff --git a/awx/ui/src/screens/TopologyView/constants.js b/awx/ui/src/screens/TopologyView/constants.js index 642e2cadc7..f4538dea63 100644 --- a/awx/ui/src/screens/TopologyView/constants.js +++ b/awx/ui/src/screens/TopologyView/constants.js @@ -1,4 +1,5 @@ /* eslint-disable-next-line import/prefer-default-export */ +export const SELECTOR = '#chart'; export const MESH_FORCE_LAYOUT = { defaultCollisionFactor: 80, defaultForceStrength: -100, @@ -15,7 +16,6 @@ export const DEFAULT_FONT_SIZE = '12px'; export const LABEL_TEXT_MAX_LENGTH = 15; export const MARGIN = 15; -export const HEIGHT = 600; export const FALLBACK_WIDTH = 700; export const NODE_STATE_COLOR_KEY = { diff --git a/awx/ui/src/screens/TopologyView/utils/helpers.js b/awx/ui/src/screens/TopologyView/utils/helpers.js index cbb6158c5b..866b0fc029 100644 --- a/awx/ui/src/screens/TopologyView/utils/helpers.js +++ b/awx/ui/src/screens/TopologyView/utils/helpers.js @@ -1,3 +1,4 @@ +import * as d3 from 'd3'; import { InstancesAPI } from 'api'; import { truncateString } from '../../../util/strings'; @@ -84,3 +85,11 @@ export const generateRandomNodes = (n) => { } return generateRandomLinks(nodes, getRandomInt(1, n - 1)); }; + +export function getWidth(selector) { + return selector ? d3.select(selector).node().clientWidth : 700; +} + +export function getHeight(selector) { + return selector !== null ? 
d3.select(selector).node().clientHeight : 600; +} From 8993dc706a439374d20085e8ee6d5e6b1fa685c9 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 17 Feb 2022 10:53:27 -0800 Subject: [PATCH 030/125] Redirect to Instances/{nodeId}/details page. --- awx/ui/src/screens/TopologyView/utils/helpers.js | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/utils/helpers.js b/awx/ui/src/screens/TopologyView/utils/helpers.js index 866b0fc029..cb185cbe61 100644 --- a/awx/ui/src/screens/TopologyView/utils/helpers.js +++ b/awx/ui/src/screens/TopologyView/utils/helpers.js @@ -1,5 +1,4 @@ import * as d3 from 'd3'; -import { InstancesAPI } from 'api'; import { truncateString } from '../../../util/strings'; import { @@ -34,13 +33,8 @@ export function renderNodeIcon(selectedNode) { } export async function redirectToDetailsPage(selectedNode, history) { - // TODO: redirect to top-level instances details page const { id: nodeId } = selectedNode; - const { - data: { results }, - } = await InstancesAPI.readInstanceGroup(nodeId); - const { id: instanceGroupId } = results[0]; - const constructedURL = `/instance_groups/${instanceGroupId}/instances/${nodeId}/details`; + const constructedURL = `/instances/${nodeId}/details`; history.push(constructedURL); } From 0d1898e72df4b76d95cfb727f321733f5ab4f7eb Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 17 Feb 2022 13:26:03 -0800 Subject: [PATCH 031/125] Add error screen. 
--- .../src/screens/TopologyView/TopologyView.js | 36 +++++++++++++------ 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index 523f13e919..b689390090 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -2,6 +2,7 @@ import React, { useEffect, useCallback, useState } from 'react'; import * as d3 from 'd3'; import { t } from '@lingui/macro'; import { PageSection, Card, CardBody } from '@patternfly/react-core'; +import ContentError from 'components/ContentError'; import useRequest from 'hooks/useRequest'; import { MeshAPI } from 'api'; import Header from './Header'; @@ -12,7 +13,7 @@ function TopologyView() { const { isLoading, result: { meshData }, - // error: fetchInitialError, + error: fetchInitialError, request: fetchMeshVisualizer, } = useRequest( useCallback(async () => { @@ -72,7 +73,6 @@ function TopologyView() { .duration(750) .call(zoom.transform, d3.zoomIdentity.translate(x, y).scale(scale)); }; - return ( <>
- - - - {!isLoading && ( - - )} - - - + {fetchInitialError ? ( + + + + + + + + ) : ( + + + + {!isLoading && ( + + )} + + + + )} ); } From ef5cd66494ef69d26ca9efe1c5b893462426034a Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Thu, 17 Feb 2022 13:58:16 -0800 Subject: [PATCH 032/125] Excise disable-lint rules. --- awx/ui/src/screens/TopologyView/Legend.js | 30 ++++++++++---------- awx/ui/src/screens/TopologyView/Tooltip.js | 26 +++++++++-------- awx/ui/src/screens/TopologyView/constants.js | 1 - 3 files changed, 29 insertions(+), 28 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/Legend.js b/awx/ui/src/screens/TopologyView/Legend.js index 2d729e5737..5fe35beb51 100644 --- a/awx/ui/src/screens/TopologyView/Legend.js +++ b/awx/ui/src/screens/TopologyView/Legend.js @@ -1,5 +1,5 @@ -/* eslint-disable i18next/no-literal-string */ import React from 'react'; +import { t } from '@lingui/macro'; import styled from 'styled-components'; import { Button as PFButton, @@ -62,49 +62,49 @@ function Legend() { component={TextVariants.small} style={{ fontWeight: 'bold', color: 'black' }} > - Legend + {t`Legend`} - Node types + {t`Node types`} - Control node + {t`Control node`} - Execution node + {t`Execution node`} - Hybrid node + {t`Hybrid node`} - Hop node + {t`Hop node`} - Status types + {t`Status types`} @@ -115,13 +115,13 @@ function Legend() { style={{ border: '1px solid gray', backgroundColor: '#3E8635' }} /> - Healthy + {t`Healthy`} - {nodeDetail.hostname} + + {nodeDetail.hostname} + - Type + {t`Type`} - {nodeDetail.node_type} node + {nodeDetail.node_type} {t`node`} - Status + {t`Status`} diff --git a/awx/ui/src/screens/TopologyView/constants.js b/awx/ui/src/screens/TopologyView/constants.js index f4538dea63..dbecb633f8 100644 --- a/awx/ui/src/screens/TopologyView/constants.js +++ b/awx/ui/src/screens/TopologyView/constants.js @@ -1,4 +1,3 @@ -/* eslint-disable-next-line import/prefer-default-export */ export const SELECTOR = '#chart'; export const MESH_FORCE_LAYOUT = 
{ defaultCollisionFactor: 80, From a60a65cd2a49a72e15d69d11191c5a4c8fe8ca4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Feb 2022 15:06:19 +0000 Subject: [PATCH 033/125] Bump url-parse from 1.5.3 to 1.5.9 in /awx/ui Bumps [url-parse](https://github.com/unshiftio/url-parse) from 1.5.3 to 1.5.9. - [Release notes](https://github.com/unshiftio/url-parse/releases) - [Commits](https://github.com/unshiftio/url-parse/compare/1.5.3...1.5.9) --- updated-dependencies: - dependency-name: url-parse dependency-type: indirect ... Signed-off-by: dependabot[bot] --- awx/ui/package-lock.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/awx/ui/package-lock.json b/awx/ui/package-lock.json index b7796452cd..1eada88b0e 100644 --- a/awx/ui/package-lock.json +++ b/awx/ui/package-lock.json @@ -66,7 +66,7 @@ "react-scripts": "5.0.0" }, "engines": { - "node": "14.x" + "node": ">=16.14.0" } }, "node_modules/@babel/code-frame": { @@ -20507,9 +20507,9 @@ } }, "node_modules/url-parse": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz", - "integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==", + "version": "1.5.9", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz", + "integrity": "sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==", "dev": true, "dependencies": { "querystringify": "^2.1.1", @@ -37195,9 +37195,9 @@ } }, "url-parse": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz", - "integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==", + "version": "1.5.9", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.9.tgz", + "integrity": 
"sha512-HpOvhKBvre8wYez+QhHcYiVvVmeF6DVnuSOOPhe3cTum3BnqHhvKaZm8FU5yTiOu/Jut2ZpB2rA/SbBA1JIGlQ==", "dev": true, "requires": { "querystringify": "^2.1.1", From 7cf0523561a72d597450b14f56fa1592e3b9743c Mon Sep 17 00:00:00 2001 From: Kersom <9053044+nixocio@users.noreply.github.com> Date: Mon, 21 Feb 2022 15:53:32 -0500 Subject: [PATCH 034/125] Display roles for organization listed when using non-English web browser (#11762) Display roles for organization listed when using non-English web browser --- .../ResourceAccessList/ResourceAccessList.js | 44 +++++++++---------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js b/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js index e4892e0371..0817928fbd 100644 --- a/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js +++ b/awx/ui/src/components/ResourceAccessList/ResourceAccessList.js @@ -56,31 +56,29 @@ function ResourceAccessList({ apiModel, resource }) { let orgRoles; if (location.pathname.includes('/organizations')) { - const { - data: { results: roles }, - } = await RolesAPI.read({ content_type__isnull: true }); - const sysAdmin = roles.filter( - (role) => role.name === 'System Administrator' - ); - const sysAud = roles.filter((role) => { - let auditor; - if (role.name === 'System Auditor') { - auditor = role.id; - } - return auditor; - }); + const [ + { + data: { results: systemAdmin }, + }, + { + data: { results: systemAuditor }, + }, + ] = await Promise.all([ + RolesAPI.read({ singleton_name: 'system_administrator' }), + RolesAPI.read({ singleton_name: 'system_auditor' }), + ]); - orgRoles = Object.values(resource.summary_fields.object_roles).map( - (opt) => { - let item; - if (opt.name === 'Admin') { - item = [`${opt.id}, ${sysAdmin[0].id}`, opt.name]; - } else if (sysAud[0].id && opt.name === 'Auditor') { - item = [`${sysAud[0].id}, ${opt.id}`, opt.name]; - } else { - item = [`${opt.id}`, opt.name]; + orgRoles = 
Object.entries(resource.summary_fields.object_roles).map( + ([key, value]) => { + if (key === 'admin_role') { + return [`${value.id}, ${systemAdmin[0].id}`, value.name]; } - return item; + + if (key === 'auditor_role') { + return [`${value.id}, ${systemAuditor[0].id}`, value.name]; + } + + return [`${value.id}`, value.name]; } ); } From eb859b98124e5a0cfa0fbfe5651412700a2c7036 Mon Sep 17 00:00:00 2001 From: Kersom <9053044+nixocio@users.noreply.github.com> Date: Mon, 21 Feb 2022 16:34:31 -0500 Subject: [PATCH 035/125] Fix TypeError when running a command on a host in a smart inventory (#11768) Fix TypeError when running a command on a host in a smart inventory See: https://github.com/ansible/awx/issues/11611 --- .../components/AdHocCommands/AdHocCommands.js | 3 +++ .../AdHocCommands/AdHocCommands.test.js | 4 ++++ .../SmartInventoryHostList.js | 17 +++++++++++++---- .../SmartInventoryHostList.test.js | 15 +++++++++++++++ 4 files changed, 35 insertions(+), 4 deletions(-) diff --git a/awx/ui/src/components/AdHocCommands/AdHocCommands.js b/awx/ui/src/components/AdHocCommands/AdHocCommands.js index 7ddee926bf..5dd69d91a3 100644 --- a/awx/ui/src/components/AdHocCommands/AdHocCommands.js +++ b/awx/ui/src/components/AdHocCommands/AdHocCommands.js @@ -59,6 +59,7 @@ function AdHocCommands({ useEffect(() => { fetchData(); }, [fetchData]); + const { isLoading: isLaunchLoading, error: launchError, @@ -172,6 +173,8 @@ function AdHocCommands({ AdHocCommands.propTypes = { adHocItems: PropTypes.arrayOf(PropTypes.object).isRequired, hasListItems: PropTypes.bool.isRequired, + onLaunchLoading: PropTypes.func.isRequired, + moduleOptions: PropTypes.arrayOf(PropTypes.array).isRequired, }; export default AdHocCommands; diff --git a/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js b/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js index 25eb74ffbc..6e51fb3522 100644 --- a/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js +++ 
b/awx/ui/src/components/AdHocCommands/AdHocCommands.test.js @@ -73,6 +73,10 @@ describe('', () => { adHocItems={adHocItems} hasListItems onLaunchLoading={() => jest.fn()} + moduleOptions={[ + ['command', 'command'], + ['shell', 'shell'], + ]} /> ); }); diff --git a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js index 2a611891ae..747e7bd058 100644 --- a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js +++ b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.js @@ -25,25 +25,33 @@ function SmartInventoryHostList({ inventory }) { const location = useLocation(); const [isAdHocLaunchLoading, setIsAdHocLaunchLoading] = useState(false); const { - result: { hosts, count }, + result: { hosts, count, moduleOptions }, error: contentError, isLoading, request: fetchHosts, } = useRequest( useCallback(async () => { const params = parseQueryString(QS_CONFIG, location.search); - const { - data: { results, count: hostCount }, - } = await InventoriesAPI.readHosts(inventory.id, params); + const [ + { + data: { results, count: hostCount }, + }, + adHocOptions, + ] = await Promise.all([ + InventoriesAPI.readHosts(inventory.id, params), + InventoriesAPI.readAdHocOptions(inventory.id), + ]); return { hosts: results, count: hostCount, + moduleOptions: adHocOptions.data.actions.GET.module_name.choices, }; }, [location.search, inventory.id]), { hosts: [], count: 0, + moduleOptions: [], } ); @@ -91,6 +99,7 @@ function SmartInventoryHostList({ inventory }) { adHocItems={selected} hasListItems={count > 0} onLaunchLoading={setIsAdHocLaunchLoading} + moduleOptions={moduleOptions} />, ] : [] diff --git a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js index 1639c80f50..0b87981836 100644 --- 
a/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js +++ b/awx/ui/src/screens/Inventory/SmartInventoryHosts/SmartInventoryHostList.test.js @@ -27,6 +27,21 @@ describe('', () => { InventoriesAPI.readHosts.mockResolvedValue({ data: mockHosts, }); + InventoriesAPI.readAdHocOptions.mockResolvedValue({ + data: { + actions: { + GET: { + module_name: { + choices: [ + ['command', 'command'], + ['shell', 'shell'], + ], + }, + }, + POST: {}, + }, + }, + }); await act(async () => { wrapper = mountWithContexts( From 039c038cd70a61251cd6ba495d34ca2a97f651a2 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 22 Feb 2022 09:36:43 -0800 Subject: [PATCH 036/125] Move zoom methods into a hook. --- .../src/screens/TopologyView/TopologyView.js | 52 ++----------- awx/ui/src/screens/TopologyView/constants.js | 2 + .../src/screens/TopologyView/utils/useZoom.js | 74 +++++++++++++++++++ 3 files changed, 82 insertions(+), 46 deletions(-) create mode 100644 awx/ui/src/screens/TopologyView/utils/useZoom.js diff --git a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index b689390090..bcb1fb4a62 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -1,5 +1,4 @@ import React, { useEffect, useCallback, useState } from 'react'; -import * as d3 from 'd3'; import { t } from '@lingui/macro'; import { PageSection, Card, CardBody } from '@patternfly/react-core'; import ContentError from 'components/ContentError'; @@ -7,6 +6,8 @@ import useRequest from 'hooks/useRequest'; import { MeshAPI } from 'api'; import Header from './Header'; import MeshGraph from './MeshGraph'; +import useZoom from './utils/useZoom'; +import { CHILDSELECTOR, PARENTSELECTOR } from './constants'; function TopologyView() { const [showLegend, setShowLegend] = useState(true); @@ -27,52 +28,11 @@ function TopologyView() { useEffect(() => { fetchMeshVisualizer(); }, [fetchMeshVisualizer]); + 
const { zoom, zoomFit, zoomIn, zoomOut, resetZoom } = useZoom( + PARENTSELECTOR, + CHILDSELECTOR + ); - const zoom = d3.zoom().on('zoom', ({ transform }) => { - d3.select('.mesh').attr('transform', transform); - }); - const zoomIn = () => { - d3.select('.mesh-svg').transition().call(zoom.scaleBy, 2); - }; - const zoomOut = () => { - d3.select('.mesh-svg').transition().call(zoom.scaleBy, 0.5); - }; - const resetZoom = () => { - const parent = d3.select('.mesh').node().parentElement; - const width = parent.clientWidth; - const height = parent.clientHeight; - d3.select('.mesh-svg') - .transition() - .duration(750) - .call( - zoom.transform, - d3.zoomIdentity, - d3 - .zoomTransform(d3.select('.mesh-svg').node()) - .invert([width / 2, height / 2]) - ); - }; - - const zoomFit = () => { - const bounds = d3.select('.mesh').node().getBBox(); - const parent = d3.select('.mesh').node().parentElement; - const fullWidth = parent.clientWidth; - const fullHeight = parent.clientHeight; - const { width, height } = bounds; - const midX = bounds.x + width / 2; - const midY = bounds.y + height / 2; - if (width === 0 || height === 0) return; // nothing to fit - const scale = 0.8 / Math.max(width / fullWidth, height / fullHeight); - const translate = [ - fullWidth / 2 - scale * midX, - fullHeight / 2 - scale * midY, - ]; - const [x, y] = translate; - d3.select('.mesh-svg') - .transition() - .duration(750) - .call(zoom.transform, d3.zoomIdentity.translate(x, y).scale(scale)); - }; return ( <>
+ * <-- parent --> + * <-- child --> + * + *
+ * Returns: { + * zoom: d3 zoom behavior/object/function to apply on selected elements + * zoomIn: function that zooms in + * zoomOut: function that zooms out + * zoomFit: function that scales child element to fit within parent element + * resetZoom: function resets the zoom level to its initial value + * } + */ + +export default function useZoom(parentSelector, childSelector) { + const zoom = d3.zoom().on('zoom', ({ transform }) => { + d3.select(childSelector).attr('transform', transform); + }); + const zoomIn = () => { + d3.select(parentSelector).transition().call(zoom.scaleBy, 2); + }; + const zoomOut = () => { + d3.select(parentSelector).transition().call(zoom.scaleBy, 0.5); + }; + const resetZoom = () => { + const parent = d3.select(parentSelector).node(); + const width = parent.clientWidth; + const height = parent.clientHeight; + d3.select(parentSelector) + .transition() + .duration(750) + .call( + zoom.transform, + d3.zoomIdentity, + d3 + .zoomTransform(d3.select(parentSelector).node()) + .invert([width / 2, height / 2]) + ); + }; + const zoomFit = () => { + const bounds = d3.select(childSelector).node().getBBox(); + const fullWidth = getWidth(parentSelector); + const fullHeight = getHeight(parentSelector); + const { width, height } = bounds; + const midX = bounds.x + width / 2; + const midY = bounds.y + height / 2; + if (width === 0 || height === 0) return; // nothing to fit + const scale = 0.8 / Math.max(width / fullWidth, height / fullHeight); + const translate = [ + fullWidth / 2 - scale * midX, + fullHeight / 2 - scale * midY, + ]; + const [x, y] = translate; + d3.select(parentSelector) + .transition() + .duration(750) + .call(zoom.transform, d3.zoomIdentity.translate(x, y).scale(scale)); + }; + + return { + zoom, + zoomIn, + zoomOut, + zoomFit, + resetZoom, + }; +} From fee47fe347124e0201c88bddd9c0a752e577635c Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Mon, 21 Feb 2022 15:49:48 -0800 Subject: [PATCH 037/125] Vertically center node type symbols on 
Firefox. --- awx/ui/src/screens/TopologyView/MeshGraph.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index df93bdeddb..1a9420f907 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -130,7 +130,7 @@ function MeshGraph({ data, showLegend, zoom }) { .append('text') .text((d) => renderNodeType(d.node_type)) .attr('text-anchor', 'middle') - .attr('alignment-baseline', 'central') + .attr('dominant-baseline', 'central') .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR); // node hostname labels From fa47e48a158f72fba9bea0cb723331716f681697 Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Tue, 22 Feb 2022 16:34:30 -0500 Subject: [PATCH 038/125] Fixes broken link from User to UserOrg (#11759) --- .../User/UserOrganizations/UserOrganizationList.js | 8 ++++---- .../User/UserOrganizations/UserOrganizationList.test.js | 2 +- .../User/UserOrganizations/UserOrganizationListItem.js | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.js b/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.js index a4b19840ce..a67ff5fa77 100644 --- a/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.js +++ b/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.js @@ -20,7 +20,7 @@ const QS_CONFIG = getQSConfig('organizations', { function UserOrganizationList() { const location = useLocation(); - const { id: userId } = useParams(); + const { id } = useParams(); const { result: { organizations, count, searchableKeys, relatedSearchableKeys }, @@ -36,8 +36,8 @@ function UserOrganizationList() { }, actions, ] = await Promise.all([ - UsersAPI.readOrganizations(userId, params), - UsersAPI.readOrganizationOptions(), + UsersAPI.readOrganizations(id, params), + UsersAPI.readOrganizationOptions(id), ]); return { searchableKeys: 
Object.keys(actions.data.actions?.GET || {}).filter( @@ -49,7 +49,7 @@ function UserOrganizationList() { organizations: results, count: orgCount, }; - }, [userId, location.search]), + }, [id, location.search]), { organizations: [], count: 0, diff --git a/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.test.js b/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.test.js index f07c787e79..f62954e175 100644 --- a/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.test.js +++ b/awx/ui/src/screens/User/UserOrganizations/UserOrganizationList.test.js @@ -72,6 +72,6 @@ describe('', () => { page_size: 20, type: 'organization', }); - expect(UsersAPI.readOrganizationOptions).toBeCalled(); + expect(UsersAPI.readOrganizationOptions).toBeCalledWith('1'); }); }); diff --git a/awx/ui/src/screens/User/UserOrganizations/UserOrganizationListItem.js b/awx/ui/src/screens/User/UserOrganizations/UserOrganizationListItem.js index 423131f86e..98535c15dd 100644 --- a/awx/ui/src/screens/User/UserOrganizations/UserOrganizationListItem.js +++ b/awx/ui/src/screens/User/UserOrganizations/UserOrganizationListItem.js @@ -12,7 +12,7 @@ export default function UserOrganizationListItem({ organization }) { > - {organization.name} + {organization.name} {organization.description} From 7ebf6b77e5d39bc27fa5c18f55eb48eff8ae0434 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 22 Feb 2022 13:45:30 -0800 Subject: [PATCH 039/125] Disable zoom controls until mesh layout is finalized. 
--- awx/ui/src/screens/TopologyView/Header.js | 5 +++++ awx/ui/src/screens/TopologyView/MeshGraph.js | 4 +++- awx/ui/src/screens/TopologyView/TopologyView.js | 3 +++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/awx/ui/src/screens/TopologyView/Header.js b/awx/ui/src/screens/TopologyView/Header.js index 4929c8ceb1..1b287023e5 100644 --- a/awx/ui/src/screens/TopologyView/Header.js +++ b/awx/ui/src/screens/TopologyView/Header.js @@ -26,6 +26,7 @@ const Header = ({ zoomOut, resetZoom, zoomFit, + showZoomControls, }) => { const { light } = PageSectionVariants; return ( @@ -54,6 +55,7 @@ const Header = ({ variant="plain" icon={} onClick={zoomIn} + isDisabled={!showZoomControls} > @@ -65,6 +67,7 @@ const Header = ({ variant="plain" icon={} onClick={zoomOut} + isDisabled={!showZoomControls} > @@ -76,6 +79,7 @@ const Header = ({ variant="plain" icon={} onClick={zoomFit} + isDisabled={!showZoomControls} > @@ -87,6 +91,7 @@ const Header = ({ variant="plain" icon={} onClick={resetZoom} + isDisabled={!showZoomControls} > diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 1a9420f907..1504f4bdae 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -34,7 +34,7 @@ const Loader = styled(ContentLoading)` width: 100%; background: white; `; -function MeshGraph({ data, showLegend, zoom }) { +function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { // function MeshGraph({ showLegend, zoom }) { const [isNodeSelected, setIsNodeSelected] = useState(false); const [selectedNode, setSelectedNode] = useState(null); @@ -236,9 +236,11 @@ function MeshGraph({ data, showLegend, zoom }) { } function calculateAlphaDecay(a, aMin, x) { + setShowZoomControls(false); const decayPercentage = Math.min((aMin / a) * 100); if (decayPercentage >= x) { d3.select('.simulation-loader').style('visibility', 'hidden'); + setShowZoomControls(true); } } }; diff --git 
a/awx/ui/src/screens/TopologyView/TopologyView.js b/awx/ui/src/screens/TopologyView/TopologyView.js index bcb1fb4a62..6ef10ca9da 100644 --- a/awx/ui/src/screens/TopologyView/TopologyView.js +++ b/awx/ui/src/screens/TopologyView/TopologyView.js @@ -11,6 +11,7 @@ import { CHILDSELECTOR, PARENTSELECTOR } from './constants'; function TopologyView() { const [showLegend, setShowLegend] = useState(true); + const [showZoomControls, setShowZoomControls] = useState(false); const { isLoading, result: { meshData }, @@ -43,6 +44,7 @@ function TopologyView() { zoomOut={zoomOut} zoomFit={zoomFit} resetZoom={resetZoom} + showZoomControls={showZoomControls} /> {fetchInitialError ? ( @@ -61,6 +63,7 @@ function TopologyView() { data={meshData} showLegend={showLegend} zoom={zoom} + setShowZoomControls={setShowZoomControls} /> )} From 61323c7f85a55ed80bf9d20be891befb096fc1fd Mon Sep 17 00:00:00 2001 From: Nikhil Jain Date: Wed, 23 Feb 2022 11:30:55 +0530 Subject: [PATCH 040/125] allow more than 400 credential types in drop down while adding new credential --- .../Credential/CredentialAdd/CredentialAdd.js | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js index f4940f7d9b..73f3bf1ea1 100644 --- a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js +++ b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js @@ -86,14 +86,19 @@ function CredentialAdd({ me }) { const { data } = await CredentialTypesAPI.read({ page_size: 200 }); const credTypes = data.results; if (data.next && data.next.includes('page=2')) { - const { - data: { results }, - } = await CredentialTypesAPI.read({ - page_size: 200, - page: 2, - }); - credTypes.concat(results); - } + let pageNo = 2; + /* eslint-disable no-await-in-loop */ + do { + const { + data: { results }, + } = await CredentialTypesAPI.read({ + page_size: 200, + page: pageNo, + }); + 
credTypes.push(...results); + pageNo++; + } while (data.count !== credTypes.length); + } /* eslint-enable no-await-in-loop */ const creds = credTypes.reduce((credentialTypesMap, credentialType) => { credentialTypesMap[credentialType.id] = credentialType; From dc2a392f4cf7f52fa322d73b1e7cd32cc5864420 Mon Sep 17 00:00:00 2001 From: Nikhil Jain Date: Wed, 23 Feb 2022 12:09:51 +0530 Subject: [PATCH 041/125] forgot to run prettier earlier --- awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js index 73f3bf1ea1..af5d182616 100644 --- a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js +++ b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js @@ -87,7 +87,7 @@ function CredentialAdd({ me }) { const credTypes = data.results; if (data.next && data.next.includes('page=2')) { let pageNo = 2; - /* eslint-disable no-await-in-loop */ + /* eslint-disable no-await-in-loop */ do { const { data: { results }, @@ -98,7 +98,7 @@ function CredentialAdd({ me }) { credTypes.push(...results); pageNo++; } while (data.count !== credTypes.length); - } /* eslint-enable no-await-in-loop */ + } /* eslint-enable no-await-in-loop */ const creds = credTypes.reduce((credentialTypesMap, credentialType) => { credentialTypesMap[credentialType.id] = credentialType; From afb8be4f0b88902e2c067f4127626ff4aaf9835a Mon Sep 17 00:00:00 2001 From: nixocio Date: Wed, 23 Feb 2022 09:12:12 -0500 Subject: [PATCH 042/125] Refactor fetch of credential types Refactor fetch of credential types --- .../Credential/CredentialAdd/CredentialAdd.js | 33 +++++++++---------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js index af5d182616..2525e74826 100644 --- 
a/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js +++ b/awx/ui/src/screens/Credential/CredentialAdd/CredentialAdd.js @@ -12,6 +12,20 @@ import { import useRequest from 'hooks/useRequest'; import CredentialForm from '../shared/CredentialForm'; +const fetchCredentialTypes = async (pageNo = 1, credentialTypes = []) => { + const { data } = await CredentialTypesAPI.read({ + page_size: 200, + page: pageNo, + }); + if (data.next) { + return fetchCredentialTypes( + pageNo + 1, + credentialTypes.concat(data.results) + ); + } + return credentialTypes.concat(data.results); +}; + function CredentialAdd({ me }) { const history = useHistory(); @@ -76,6 +90,7 @@ function CredentialAdd({ me }) { history.push(`/credentials/${credentialId}/details`); } }, [credentialId, history]); + const { isLoading, error, @@ -83,23 +98,7 @@ function CredentialAdd({ me }) { result, } = useRequest( useCallback(async () => { - const { data } = await CredentialTypesAPI.read({ page_size: 200 }); - const credTypes = data.results; - if (data.next && data.next.includes('page=2')) { - let pageNo = 2; - /* eslint-disable no-await-in-loop */ - do { - const { - data: { results }, - } = await CredentialTypesAPI.read({ - page_size: 200, - page: pageNo, - }); - credTypes.push(...results); - pageNo++; - } while (data.count !== credTypes.length); - } /* eslint-enable no-await-in-loop */ - + const credTypes = await fetchCredentialTypes(); const creds = credTypes.reduce((credentialTypesMap, credentialType) => { credentialTypesMap[credentialType.id] = credentialType; return credentialTypesMap; From c7a1fb67d015da2725d734ebe315c333101f3478 Mon Sep 17 00:00:00 2001 From: John Westcott IV Date: Wed, 23 Feb 2022 09:35:11 -0500 Subject: [PATCH 043/125] SAML superuse/auditor now searching all fields in a list instead of just the first --- awx/sso/pipeline.py | 9 +++++++-- awx/sso/tests/functional/test_pipeline.py | 14 ++++++++++++-- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git 
a/awx/sso/pipeline.py b/awx/sso/pipeline.py index 13549861bb..3a63391fe8 100644 --- a/awx/sso/pipeline.py +++ b/awx/sso/pipeline.py @@ -263,9 +263,14 @@ def _check_flag(user, flag, attributes, user_flags_settings): if user_flags_settings.get(is_value_key, None): # If so, check and see if the value of the attr matches the required value attribute_value = attributes.get(attr_setting, None) + attribute_matches = False if isinstance(attribute_value, (list, tuple)): - attribute_value = attribute_value[0] - if attribute_value == user_flags_settings.get(is_value_key): + if user_flags_settings.get(is_value_key) in attribute_value: + attribute_matches = True + elif attribute_value == user_flags_settings.get(is_value_key): + attribute_matches = True + + if attribute_matches: logger.debug("Giving %s %s from attribute %s with matching value" % (user.username, flag, attr_setting)) new_flag = True # if they don't match make sure that new_flag is false diff --git a/awx/sso/tests/functional/test_pipeline.py b/awx/sso/tests/functional/test_pipeline.py index 6ed084a9d7..7954ac11f3 100644 --- a/awx/sso/tests/functional/test_pipeline.py +++ b/awx/sso/tests/functional/test_pipeline.py @@ -447,6 +447,16 @@ class TestSAMLUserFlags: {'is_superuser_role': 'test-role-1', 'is_superuser_attr': 'is_superuser', 'is_superuser_value': 'true'}, (True, True), ), + # In this test case we will validate that a single attribute (instead of a list) still works + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'test_id'}, + (True, True), + ), + # This will be a negative test for a single atrribute + ( + {'is_superuser_attr': 'name_id', 'is_superuser_value': 'junk'}, + (False, False), + ), ], ) def test__check_flag(self, user_flags_settings, expected): @@ -457,10 +467,10 @@ class TestSAMLUserFlags: attributes = { 'email': ['noone@nowhere.com'], 'last_name': ['Westcott'], - 'is_superuser': ['true'], + 'is_superuser': ['something', 'else', 'true'], 'username': ['test_id'], 'first_name': ['John'], 
- 'Role': ['test-role-1'], + 'Role': ['test-role-1', 'something', 'different'], 'name_id': 'test_id', } From 3ab73ddf84a2d1e6aefab6b8b61e1d6cd28b1056 Mon Sep 17 00:00:00 2001 From: Kersom <9053044+nixocio@users.noreply.github.com> Date: Mon, 21 Feb 2022 16:34:31 -0500 Subject: [PATCH 044/125] Fix TypeError when running a command on a host in a smart inventory (#11768) Fix TypeError when running a command on a host in a smart inventory See: https://github.com/ansible/awx/issues/11611 --- awx/api/views/mesh_visualizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/api/views/mesh_visualizer.py b/awx/api/views/mesh_visualizer.py index 741239cbfa..d2c04f0962 100644 --- a/awx/api/views/mesh_visualizer.py +++ b/awx/api/views/mesh_visualizer.py @@ -19,7 +19,7 @@ class MeshVisualizer(APIView): data = { 'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data, - 'links': InstanceLinkSerializer(InstanceLink.objects.all(), many=True).data, + 'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data, } return Response(data) From a394f11d07a9b1ae6842a85c287f51ad133f061a Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Wed, 23 Feb 2022 11:36:20 -0500 Subject: [PATCH 045/125] Resolves occassions where missing table data moves items to the left (#11772) --- .../screens/User/UserList/UserList.test.js | 46 ++++++++++++++++++- .../src/screens/User/UserList/UserListItem.js | 4 +- 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/awx/ui/src/screens/User/UserList/UserList.test.js b/awx/ui/src/screens/User/UserList/UserList.test.js index b3fd3303d0..32ab5c94b4 100644 --- a/awx/ui/src/screens/User/UserList/UserList.test.js +++ b/awx/ui/src/screens/User/UserList/UserList.test.js @@ -82,6 +82,41 @@ const mockUsers = [ external_account: null, auth: [], }, + { + id: 10, + type: 'user', + url: '/api/v2/users/10/', + related: { + teams: '/api/v2/users/10/teams/', + organizations: 
'/api/v2/users/10/organizations/', + admin_of_organizations: '/api/v2/users/10/admin_of_organizations/', + projects: '/api/v2/users/10/projects/', + credentials: '/api/v2/users/10/credentials/', + roles: '/api/v2/users/10/roles/', + activity_stream: '/api/v2/users/10/activity_stream/', + access_list: '/api/v2/users/10/access_list/', + tokens: '/api/v2/users/10/tokens/', + authorized_tokens: '/api/v2/users/10/authorized_tokens/', + personal_tokens: '/api/v2/users/10/personal_tokens/', + }, + summary_fields: { + user_capabilities: { + edit: true, + delete: false, + }, + }, + created: '2019-11-04T18:52:13.565525Z', + username: 'nobody', + first_name: '', + last_name: '', + email: 'systemauditor@ansible.com', + is_superuser: false, + is_system_auditor: true, + ldap_dn: '', + last_login: null, + external_account: null, + auth: [], + }, ]; afterEach(() => { @@ -124,6 +159,15 @@ describe('UsersList with full permissions', () => { expect(wrapper.find('ToolbarAddButton').length).toBe(1); }); + test('Last user should have no first name or last name and the row items should render properly', async () => { + await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0); + expect(UsersAPI.read).toHaveBeenCalled(); + expect(wrapper.find('Td[dataLabel="First Name"]').at(2)).toHaveLength(1); + expect(wrapper.find('Td[dataLabel="First Name"]').at(2).text()).toBe(''); + expect(wrapper.find('Td[dataLabel="Last Name"]').at(2)).toHaveLength(1); + expect(wrapper.find('Td[dataLabel="Last Name"]').at(2).text()).toBe(''); + }); + test('should check and uncheck the row item', async () => { expect( wrapper.find('.pf-c-table__check input').first().props().checked @@ -147,7 +191,7 @@ describe('UsersList with full permissions', () => { }); test('should check all row items when select all is checked', async () => { - expect(wrapper.find('.pf-c-table__check input')).toHaveLength(2); + expect(wrapper.find('.pf-c-table__check input')).toHaveLength(3); wrapper.find('.pf-c-table__check 
input').forEach((el) => { expect(el.props().checked).toBe(false); }); diff --git a/awx/ui/src/screens/User/UserList/UserListItem.js b/awx/ui/src/screens/User/UserList/UserListItem.js index b4b85a4077..4845de1993 100644 --- a/awx/ui/src/screens/User/UserList/UserListItem.js +++ b/awx/ui/src/screens/User/UserList/UserListItem.js @@ -50,8 +50,8 @@ function UserListItem({ user, isSelected, onSelect, detailUrl, rowIndex }) { )} - {user.first_name && {user.first_name}} - {user.last_name && {user.last_name}} + {user.first_name} + {user.last_name} {user_type} Date: Wed, 23 Feb 2022 13:56:25 -0500 Subject: [PATCH 046/125] fix missing job lifecycle messages (#11801) we were missing these messages for control type jobs that call start_task earlier than other types of jobs --- awx/main/scheduler/task_manager.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 09ea5e23e8..beb4840c9a 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -493,6 +493,8 @@ class TaskManager: control_instance.jobs_running += 1 self.dependency_graph.add_job(task) execution_instance = self.real_instances[control_instance.hostname] + task.log_lifecycle("controller_node_chosen") + task.log_lifecycle("execution_node_chosen") self.start_task(task, self.controlplane_ig, task.get_jobs_fail_chain(), execution_instance) found_acceptable_queue = True continue From 0d75a25bf0ecd0b4aafd0ea60330dc5bab3b7a93 Mon Sep 17 00:00:00 2001 From: Marcelo Moreira de Mello Date: Tue, 8 Feb 2022 15:11:15 -0500 Subject: [PATCH 047/125] Do not mount /etc/redhat-access-insights into EEs Sharing the /etc/redhat-access-insights is no longer required for EEs. Furthermore, this fixes a SELinux issue when launching multiple jobs with concurrency and fact_caching enabled. 
i.e: lsetxattr /etc/redhat-access-insights: operation not permitted --- awx/main/tasks/jobs.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index f31eb7084f..7dea383014 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -854,24 +854,6 @@ class RunJob(BaseTask): d[r'Vault password \({}\):\s*?$'.format(vault_id)] = k return d - def build_execution_environment_params(self, instance, private_data_dir): - if settings.IS_K8S: - return {} - - params = super(RunJob, self).build_execution_environment_params(instance, private_data_dir) - # If this has an insights agent and it is not already mounted then show it - insights_dir = os.path.dirname(settings.INSIGHTS_SYSTEM_ID_FILE) - if instance.use_fact_cache and os.path.exists(insights_dir): - logger.info('not parent of others') - params.setdefault('container_volume_mounts', []) - params['container_volume_mounts'].extend( - [ - f"{insights_dir}:{insights_dir}:Z", - ] - ) - - return params - def pre_run_hook(self, job, private_data_dir): super(RunJob, self).pre_run_hook(job, private_data_dir) if job.inventory is None: From 7fbab6760e395426e905a3fa6ab4069d14fc8d79 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 23 Feb 2022 12:37:20 -0800 Subject: [PATCH 048/125] Small layout adjustment. 
--- awx/ui/src/screens/TopologyView/MeshGraph.js | 7 +++---- awx/ui/src/screens/TopologyView/constants.js | 12 +++--------- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 1504f4bdae..fd5a17164d 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -59,12 +59,11 @@ function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { const graph = data; const simulation = d3 - .forceSimulation(graph.nodes) + .forceSimulation() + .nodes(graph.nodes) .force( 'charge', - d3 - .forceManyBody(MESH_FORCE_LAYOUT.defaultForceBody) - .strength(MESH_FORCE_LAYOUT.defaultForceStrength) + d3.forceManyBody().strength(MESH_FORCE_LAYOUT.defaultForceStrength) ) .force( 'link', diff --git a/awx/ui/src/screens/TopologyView/constants.js b/awx/ui/src/screens/TopologyView/constants.js index 590a375d5e..a19abd63cd 100644 --- a/awx/ui/src/screens/TopologyView/constants.js +++ b/awx/ui/src/screens/TopologyView/constants.js @@ -1,30 +1,24 @@ export const SELECTOR = '#chart'; export const PARENTSELECTOR = '.mesh-svg'; export const CHILDSELECTOR = '.mesh'; +export const DEFAULT_RADIUS = 16; export const MESH_FORCE_LAYOUT = { - defaultCollisionFactor: 80, - defaultForceStrength: -100, - defaultForceBody: 75, + defaultCollisionFactor: DEFAULT_RADIUS * 2 + 20, + defaultForceStrength: -30, defaultForceX: 0, defaultForceY: 0, }; - -export const DEFAULT_RADIUS = 16; export const DEFAULT_NODE_COLOR = '#0066CC'; export const DEFAULT_NODE_HIGHLIGHT_COLOR = '#16407C'; export const DEFAULT_NODE_LABEL_TEXT_COLOR = 'white'; export const DEFAULT_FONT_SIZE = '12px'; export const LABEL_TEXT_MAX_LENGTH = 15; - export const MARGIN = 15; -export const FALLBACK_WIDTH = 700; - export const NODE_STATE_COLOR_KEY = { disabled: '#6A6E73', healthy: '#3E8635', error: '#C9190B', }; - export const NODE_STATE_HTML_ENTITY_KEY = { disabled: '\u25EF', 
healthy: '\u2713', From fd135caed5007ba291d60b65f8464db4f5dce546 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 23 Feb 2022 18:54:18 -0800 Subject: [PATCH 049/125] Offload simulation calculation to web worker. --- .../screens/TopologyView/ContentLoading.js | 15 +- awx/ui/src/screens/TopologyView/MeshGraph.js | 333 +++++++++--------- awx/ui/src/screens/TopologyView/constants.js | 5 +- .../utils/workers/simulationWorker.js | 35 ++ 4 files changed, 217 insertions(+), 171 deletions(-) create mode 100644 awx/ui/src/screens/TopologyView/utils/workers/simulationWorker.js diff --git a/awx/ui/src/screens/TopologyView/ContentLoading.js b/awx/ui/src/screens/TopologyView/ContentLoading.js index b137299c5d..cb67f6d34b 100644 --- a/awx/ui/src/screens/TopologyView/ContentLoading.js +++ b/awx/ui/src/screens/TopologyView/ContentLoading.js @@ -4,11 +4,11 @@ import { t } from '@lingui/macro'; import styled from 'styled-components'; import { EmptyState as PFEmptyState, - EmptyStateIcon, + Progress, + ProgressMeasureLocation, Text, TextContent, TextVariants, - Spinner, } from '@patternfly/react-core'; import { TopologyIcon as PFTopologyIcon } from '@patternfly/react-icons'; @@ -23,10 +23,16 @@ const TopologyIcon = styled(PFTopologyIcon)` fill: #6a6e73; `; -const ContentLoading = ({ className }) => ( +const ContentLoading = ({ className, progress }) => ( - + + ( {t`Please wait until the topology view is populated...`} - ); diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index fd5a17164d..583000a36b 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -19,7 +19,6 @@ import { // getRandomInt, } from './utils/helpers'; import { - MESH_FORCE_LAYOUT, DEFAULT_RADIUS, DEFAULT_NODE_COLOR, DEFAULT_NODE_HIGHLIGHT_COLOR, @@ -39,10 +38,12 @@ function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { const [isNodeSelected, setIsNodeSelected] = useState(false); const 
[selectedNode, setSelectedNode] = useState(null); const [nodeDetail, setNodeDetail] = useState(null); + const [simulationProgress, setSimulationProgress] = useState(null); const history = useHistory(); // const data = generateRandomNodes(getRandomInt(4, 50)); const draw = () => { + setShowZoomControls(false); const width = getWidth(SELECTOR); const height = getHeight(SELECTOR); @@ -58,136 +59,164 @@ function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { const graph = data; - const simulation = d3 - .forceSimulation() - .nodes(graph.nodes) - .force( - 'charge', - d3.forceManyBody().strength(MESH_FORCE_LAYOUT.defaultForceStrength) - ) - .force( - 'link', - d3.forceLink(graph.links).id((d) => d.hostname) - ) - .force( - 'collide', - d3.forceCollide(MESH_FORCE_LAYOUT.defaultCollisionFactor) - ) - .force('forceX', d3.forceX(MESH_FORCE_LAYOUT.defaultForceX)) - .force('forceY', d3.forceY(MESH_FORCE_LAYOUT.defaultForceY)) - .force('center', d3.forceCenter(width / 2, height / 2)); + /* WEB WORKER */ + const worker = new Worker( + new URL('./utils/workers/simulationWorker.js', import.meta.url) + ); - const link = mesh - .append('g') - .attr('class', `links`) - .attr('data-cy', 'links') - .selectAll('line') - .data(graph.links) - .enter() - .append('line') - .attr('class', (_, i) => `link-${i}`) - .attr('data-cy', (d) => `${d.source.hostname}-${d.target.hostname}`) - .style('fill', 'none') - .style('stroke', '#ccc') - .style('stroke-width', '2px') - .attr('pointer-events', 'none') - .on('mouseover', function showPointer() { - d3.select(this).transition().style('cursor', 'pointer'); - }); + worker.postMessage({ + nodes: graph.nodes, + links: graph.links, + }); - const node = mesh - .append('g') - .attr('class', 'nodes') - .attr('data-cy', 'nodes') - .selectAll('g') - .data(graph.nodes) - .enter() - .append('g') - .on('mouseenter', function handleNodeHover(_, d) { - d3.select(this).transition().style('cursor', 'pointer'); - highlightSiblings(d); - }) - 
.on('mouseleave', (_, d) => { - deselectSiblings(d); - }) - .on('click', (_, d) => { - setNodeDetail(d); - highlightSelected(d); - }); + worker.onmessage = function handleWorkerEvent(event) { + switch (event.data.type) { + case 'tick': + return ticked(event.data); + case 'end': + return ended(event.data); + default: + return false; + } + }; - // node circles - node - .append('circle') - .attr('r', DEFAULT_RADIUS) - .attr('class', (d) => d.node_type) - .attr('class', (d) => `id-${d.id}`) - .attr('fill', DEFAULT_NODE_COLOR) - .attr('stroke', DEFAULT_NODE_LABEL_TEXT_COLOR); + function ticked({ progress }) { + const calculatedPercent = Math.round(progress * 100); + setSimulationProgress(calculatedPercent); + } - // node type labels - node - .append('text') - .text((d) => renderNodeType(d.node_type)) - .attr('text-anchor', 'middle') - .attr('dominant-baseline', 'central') - .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR); - - // node hostname labels - const hostNames = node.append('g'); - hostNames - .append('text') - .text((d) => renderLabelText(d.node_state, d.hostname)) - .attr('class', 'placeholder') - .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR) - .attr('text-anchor', 'middle') - .attr('y', 40) - .each(function calculateLabelWidth() { - // eslint-disable-next-line react/no-this-in-sfc - const bbox = this.getBBox(); - // eslint-disable-next-line react/no-this-in-sfc - d3.select(this.parentNode) - .append('rect') - .attr('x', bbox.x) - .attr('y', bbox.y) - .attr('width', bbox.width) - .attr('height', bbox.height) - .attr('rx', 8) - .attr('ry', 8) - .style('fill', (d) => renderStateColor(d.node_state)); - }); - svg.selectAll('text.placeholder').remove(); - hostNames - .append('text') - .text((d) => renderLabelText(d.node_state, d.hostname)) - .attr('font-size', DEFAULT_FONT_SIZE) - .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR) - .attr('text-anchor', 'middle') - .attr('y', 38); - - simulation.nodes(graph.nodes).on('tick', ticked); - 
simulation.force('link').links(graph.links); - - function ticked() { - d3.select('.simulation-loader').style('visibility', 'visible'); - - link + function ended({ nodes, links }) { + // Remove loading screen + d3.select('.simulation-loader').style('visibility', 'hidden'); + setShowZoomControls(true); + // Center the mesh + const simulation = d3 + .forceSimulation(nodes) + .force('center', d3.forceCenter(width / 2, height / 2)); + simulation.tick(); + // Add links + mesh + .append('g') + .attr('class', `links`) + .attr('data-cy', 'links') + .selectAll('line') + .data(links) + .enter() + .append('line') .attr('x1', (d) => d.source.x) .attr('y1', (d) => d.source.y) .attr('x2', (d) => d.target.x) - .attr('y2', (d) => d.target.y); + .attr('y2', (d) => d.target.y) + .attr('class', (_, i) => `link-${i}`) + .attr('data-cy', (d) => `${d.source.hostname}-${d.target.hostname}`) + .style('fill', 'none') + .style('stroke', '#ccc') + .style('stroke-width', '2px') + .attr('pointer-events', 'none') + .on('mouseover', function showPointer() { + d3.select(this).transition().style('cursor', 'pointer'); + }); + // add nodes + const node = mesh + .append('g') + .attr('class', 'nodes') + .attr('data-cy', 'nodes') + .selectAll('g') + .data(nodes) + .enter() + .append('g') + .on('mouseenter', function handleNodeHover(_, d) { + d3.select(this).transition().style('cursor', 'pointer'); + highlightSiblings(d); + }) + .on('mouseleave', (_, d) => { + deselectSiblings(d); + }) + .on('click', (_, d) => { + setNodeDetail(d); + highlightSelected(d); + }); - node.attr('transform', (d) => `translate(${d.x},${d.y})`); - calculateAlphaDecay(simulation.alpha(), simulation.alphaMin(), 20); - } + // node circles + node + .append('circle') + .attr('r', DEFAULT_RADIUS) + .attr('cx', (d) => d.x) + .attr('cy', (d) => d.y) + .attr('class', (d) => d.node_type) + .attr('class', (d) => `id-${d.id}`) + .attr('fill', DEFAULT_NODE_COLOR) + .attr('stroke', DEFAULT_NODE_LABEL_TEXT_COLOR); - svg.call(zoom); + // node 
type labels + node + .append('text') + .text((d) => renderNodeType(d.node_type)) + .attr('x', (d) => d.x) + .attr('y', (d) => d.y) + .attr('text-anchor', 'middle') + .attr('dominant-baseline', 'central') + .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR); - function highlightSiblings(n) { - setTimeout(() => { - svg - .select(`circle.id-${n.id}`) - .attr('fill', DEFAULT_NODE_HIGHLIGHT_COLOR); - const immediate = graph.links.filter( + // node hostname labels + const hostNames = node.append('g'); + hostNames + .append('text') + .attr('x', (d) => d.x) + .attr('y', (d) => d.y + 40) + .text((d) => renderLabelText(d.node_state, d.hostname)) + .attr('class', 'placeholder') + .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR) + .attr('text-anchor', 'middle') + .each(function calculateLabelWidth() { + // eslint-disable-next-line react/no-this-in-sfc + const bbox = this.getBBox(); + // eslint-disable-next-line react/no-this-in-sfc + d3.select(this.parentNode) + .append('rect') + .attr('x', bbox.x) + .attr('y', bbox.y) + .attr('width', bbox.width) + .attr('height', bbox.height) + .attr('rx', 8) + .attr('ry', 8) + .style('fill', (d) => renderStateColor(d.node_state)); + }); + svg.selectAll('text.placeholder').remove(); + hostNames + .append('text') + .attr('x', (d) => d.x) + .attr('y', (d) => d.y + 38) + .text((d) => renderLabelText(d.node_state, d.hostname)) + .attr('font-size', DEFAULT_FONT_SIZE) + .attr('fill', DEFAULT_NODE_LABEL_TEXT_COLOR) + .attr('text-anchor', 'middle'); + + svg.call(zoom); + + function highlightSiblings(n) { + setTimeout(() => { + svg + .select(`circle.id-${n.id}`) + .attr('fill', DEFAULT_NODE_HIGHLIGHT_COLOR); + const immediate = links.filter( + (l) => + n.hostname === l.source.hostname || + n.hostname === l.target.hostname + ); + immediate.forEach((s) => { + svg + .selectAll(`.link-${s.index}`) + .transition() + .style('stroke', '#0066CC') + .style('stroke-width', '3px'); + }); + }, 0); + } + + function deselectSiblings(n) { + 
svg.select(`circle.id-${n.id}`).attr('fill', DEFAULT_NODE_COLOR); + const immediate = links.filter( (l) => n.hostname === l.source.hostname || n.hostname === l.target.hostname ); @@ -195,51 +224,27 @@ function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { svg .selectAll(`.link-${s.index}`) .transition() - .style('stroke', '#0066CC') - .style('stroke-width', '3px'); + .style('stroke', '#ccc') + .style('stroke-width', '2px'); }); - }, 0); - } - - function deselectSiblings(n) { - svg.select(`circle.id-${n.id}`).attr('fill', DEFAULT_NODE_COLOR); - const immediate = graph.links.filter( - (l) => - n.hostname === l.source.hostname || n.hostname === l.target.hostname - ); - immediate.forEach((s) => { - svg - .selectAll(`.link-${s.index}`) - .transition() - .style('stroke', '#ccc') - .style('stroke-width', '2px'); - }); - } - - function highlightSelected(n) { - if (svg.select(`circle.id-${n.id}`).attr('stroke-width') !== null) { - // toggle rings - svg.select(`circle.id-${n.id}`).attr('stroke-width', null); - // show default empty state of tooltip - setIsNodeSelected(false); - setSelectedNode(null); - return; } - svg.selectAll('circle').attr('stroke-width', null); - svg - .select(`circle.id-${n.id}`) - .attr('stroke-width', '5px') - .attr('stroke', '#D2D2D2'); - setIsNodeSelected(true); - setSelectedNode(n); - } - function calculateAlphaDecay(a, aMin, x) { - setShowZoomControls(false); - const decayPercentage = Math.min((aMin / a) * 100); - if (decayPercentage >= x) { - d3.select('.simulation-loader').style('visibility', 'hidden'); - setShowZoomControls(true); + function highlightSelected(n) { + if (svg.select(`circle.id-${n.id}`).attr('stroke-width') !== null) { + // toggle rings + svg.select(`circle.id-${n.id}`).attr('stroke-width', null); + // show default empty state of tooltip + setIsNodeSelected(false); + setSelectedNode(null); + return; + } + svg.selectAll('circle').attr('stroke-width', null); + svg + .select(`circle.id-${n.id}`) + 
.attr('stroke-width', '5px') + .attr('stroke', '#D2D2D2'); + setIsNodeSelected(true); + setSelectedNode(n); } } }; @@ -267,7 +272,7 @@ function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { redirectToDetailsPage(selectedNode, history) } /> - +
); } diff --git a/awx/ui/src/screens/TopologyView/constants.js b/awx/ui/src/screens/TopologyView/constants.js index a19abd63cd..d217078f6c 100644 --- a/awx/ui/src/screens/TopologyView/constants.js +++ b/awx/ui/src/screens/TopologyView/constants.js @@ -3,8 +3,9 @@ export const PARENTSELECTOR = '.mesh-svg'; export const CHILDSELECTOR = '.mesh'; export const DEFAULT_RADIUS = 16; export const MESH_FORCE_LAYOUT = { - defaultCollisionFactor: DEFAULT_RADIUS * 2 + 20, - defaultForceStrength: -30, + defaultCollisionFactor: DEFAULT_RADIUS * 2 + 30, + defaultForceStrength: -50, + defaultForceBody: 15, defaultForceX: 0, defaultForceY: 0, }; diff --git a/awx/ui/src/screens/TopologyView/utils/workers/simulationWorker.js b/awx/ui/src/screens/TopologyView/utils/workers/simulationWorker.js new file mode 100644 index 0000000000..95d5a1170d --- /dev/null +++ b/awx/ui/src/screens/TopologyView/utils/workers/simulationWorker.js @@ -0,0 +1,35 @@ +import * as d3 from 'd3'; +import { MESH_FORCE_LAYOUT } from '../../constants'; + +onmessage = function calculateLayout({ data: { nodes, links } }) { + const simulation = d3 + .forceSimulation(nodes) + .force( + 'charge', + d3 + .forceManyBody(MESH_FORCE_LAYOUT.defaultForceBody) + .strength(MESH_FORCE_LAYOUT.defaultForceStrength) + ) + .force( + 'link', + d3.forceLink(links).id((d) => d.hostname) + ) + .force('collide', d3.forceCollide(MESH_FORCE_LAYOUT.defaultCollisionFactor)) + .force('forceX', d3.forceX(MESH_FORCE_LAYOUT.defaultForceX)) + .force('forceY', d3.forceY(MESH_FORCE_LAYOUT.defaultForceY)) + .stop(); + + for ( + let i = 0, + n = Math.ceil( + Math.log(simulation.alphaMin()) / Math.log(1 - simulation.alphaDecay()) + ); + i < n; + ++i + ) { + postMessage({ type: 'tick', progress: i / n }); + simulation.tick(); + } + + postMessage({ type: 'end', nodes, links }); +}; From 88f66d5c51ef225374d24f759fd336689f890737 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Wed, 16 Feb 2022 11:43:02 -0500 Subject: [PATCH 050/125] Enable Podman ipv6 
support by default --- awx/settings/defaults.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 5ddefb66bd..7a41615b10 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -996,4 +996,4 @@ DEFAULT_CONTROL_PLANE_QUEUE_NAME = 'controlplane' # Extend container runtime attributes. # For example, to disable SELinux in containers for podman # DEFAULT_CONTAINER_RUN_OPTIONS = ['--security-opt', 'label=disable'] -DEFAULT_CONTAINER_RUN_OPTIONS = [] +DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true'] From 4bd6c2a8046b20facc3dc2275378554546e3c6d5 Mon Sep 17 00:00:00 2001 From: Elijah DeLee Date: Wed, 23 Feb 2022 11:06:10 -0500 Subject: [PATCH 051/125] set max dispatch workers to same as max forks Right now, without this, we end up with a different number for max_workers than max_forks. For example, on a control node with 16 Gi of RAM, max_mem_capacity w/ 100 MB/fork = (16*1024)/100 --> 164 max_workers = 5 * 16 --> 80 This means we would allow that control node to control up to 164 jobs, but all jobs after the 80th job will be stuck in `waiting` waiting for a dispatch worker to free up to run the job. 
--- awx/main/dispatch/pool.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py index 3d08ca3fd7..f1f46363f3 100644 --- a/awx/main/dispatch/pool.py +++ b/awx/main/dispatch/pool.py @@ -22,7 +22,7 @@ import psutil from awx.main.models import UnifiedJob from awx.main.dispatch import reaper -from awx.main.utils.common import convert_mem_str_to_bytes +from awx.main.utils.common import convert_mem_str_to_bytes, get_mem_effective_capacity if 'run_callback_receiver' in sys.argv: logger = logging.getLogger('awx.main.commands.run_callback_receiver') @@ -324,8 +324,9 @@ class AutoscalePool(WorkerPool): total_memory_gb = convert_mem_str_to_bytes(settings_absmem) // 2**30 else: total_memory_gb = (psutil.virtual_memory().total >> 30) + 1 # noqa: round up - # 5 workers per GB of total memory - self.max_workers = total_memory_gb * 5 + + # Get same number as max forks based on memory, this function takes memory as bytes + self.max_workers = get_mem_effective_capacity(total_memory_gb * 2**30) # max workers can't be less than min_workers self.max_workers = max(self.min_workers, self.max_workers) From 4d47f24dd48e3ff998268f35d1b2c0f855ff45e2 Mon Sep 17 00:00:00 2001 From: John Westcott IV Date: Thu, 24 Feb 2022 11:17:36 -0500 Subject: [PATCH 052/125] Changing API version from v1beta1 to v1 --- tools/docker-compose-minikube/minikube/templates/rbac.yml.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/docker-compose-minikube/minikube/templates/rbac.yml.j2 b/tools/docker-compose-minikube/minikube/templates/rbac.yml.j2 index 642e2d94ca..a015a8e425 100644 --- a/tools/docker-compose-minikube/minikube/templates/rbac.yml.j2 +++ b/tools/docker-compose-minikube/minikube/templates/rbac.yml.j2 @@ -24,7 +24,7 @@ rules: resources: ["secrets"] verbs: ["get", "create", "delete"] --- -apiVersion: rbac.authorization.k8s.io/v1beta1 +apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: 
name: {{ minikube_service_account_name }} From cb57752903bf7f7ceaa296e226a56dbc9cb034e1 Mon Sep 17 00:00:00 2001 From: John Westcott IV <32551173+john-westcott-iv@users.noreply.github.com> Date: Sun, 27 Feb 2022 07:27:25 -0500 Subject: [PATCH 053/125] Changing session cookie name and added a way for clients to know what the name is #11413 (#11679) * Changing session cookie name and added a way for clients to know what the key name is * Adding session information to docs * Fixing how awxkit gets the session id header --- awx/api/generics.py | 1 + awx/settings/defaults.py | 4 ++++ awx/sso/views.py | 1 + awxkit/awxkit/api/client.py | 15 ++++++++++++--- awxkit/awxkit/awx/utils.py | 8 ++++---- awxkit/awxkit/ws.py | 6 ++++-- docs/auth/session.md | 21 +++++++++++++-------- 7 files changed, 39 insertions(+), 17 deletions(-) diff --git a/awx/api/generics.py b/awx/api/generics.py index b10728f32a..58ed5a9801 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -99,6 +99,7 @@ class LoggedLoginView(auth_views.LoginView): current_user = smart_text(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) + ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) return ret else: diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 7a41615b10..9d0078916d 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -252,6 +252,10 @@ SESSION_COOKIE_SECURE = True # Note: This setting may be overridden by database settings. SESSION_COOKIE_AGE = 1800 +# Name of the cookie that contains the session information. +# Note: Changing this value may require changes to any clients. +SESSION_COOKIE_NAME = 'awx_sessionid' + # Maximum number of per-user valid, concurrent sessions. # -1 is unlimited # Note: This setting may be overridden by database settings. 
diff --git a/awx/sso/views.py b/awx/sso/views.py index 35f81b26a4..2f3a448af9 100644 --- a/awx/sso/views.py +++ b/awx/sso/views.py @@ -46,6 +46,7 @@ class CompleteView(BaseRedirectView): current_user = smart_text(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') response.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) + response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) return response diff --git a/awxkit/awxkit/api/client.py b/awxkit/awxkit/api/client.py index 1cea4a61c2..04b399e079 100644 --- a/awxkit/awxkit/api/client.py +++ b/awxkit/awxkit/api/client.py @@ -33,6 +33,10 @@ class Connection(object): def __init__(self, server, verify=False): self.server = server self.verify = verify + # Note: We use the old sessionid here incase someone is trying to connect to an older AWX version + # There is a check below so that if AWX returns an X-API-Session-Cookie-Name we will grab it and + # connect with the new session cookie name. 
+ self.session_cookie_name = 'sessionid' if not self.verify: requests.packages.urllib3.disable_warnings() @@ -49,8 +53,13 @@ class Connection(object): _next = kwargs.get('next') if _next: headers = self.session.headers.copy() - self.post('/api/login/', headers=headers, data=dict(username=username, password=password, next=_next)) - self.session_id = self.session.cookies.get('sessionid') + response = self.post('/api/login/', headers=headers, data=dict(username=username, password=password, next=_next)) + # The login causes a redirect so we need to search the history of the request to find the header + for historical_response in response.history: + if 'X-API-Session-Cookie-Name' in historical_response.headers: + self.session_cookie_name = historical_response.headers.get('X-API-Session-Cookie-Name') + + self.session_id = self.session.cookies.get(self.session_cookie_name, None) self.uses_session_cookie = True else: self.session.auth = (username, password) @@ -61,7 +70,7 @@ class Connection(object): def logout(self): if self.uses_session_cookie: - self.session.cookies.pop('sessionid', None) + self.session.cookies.pop(self.session_cookie_name, None) else: self.session.auth = None diff --git a/awxkit/awxkit/awx/utils.py b/awxkit/awxkit/awx/utils.py index 6fc3a18480..df61f0b7a0 100644 --- a/awxkit/awxkit/awx/utils.py +++ b/awxkit/awxkit/awx/utils.py @@ -95,12 +95,12 @@ def as_user(v, username, password=None): # requests doesn't provide interface for retrieving # domain segregated cookies other than iterating. 
for cookie in connection.session.cookies: - if cookie.name == 'sessionid': + if cookie.name == connection.session_cookie_name: session_id = cookie.value domain = cookie.domain break if session_id: - del connection.session.cookies['sessionid'] + del connection.session.cookies[connection.session_cookie_name] if access_token: kwargs = dict(token=access_token) else: @@ -114,9 +114,9 @@ def as_user(v, username, password=None): if config.use_sessions: if access_token: connection.session.auth = None - del connection.session.cookies['sessionid'] + del connection.session.cookies[connection.session_cookie_name] if session_id: - connection.session.cookies.set('sessionid', session_id, domain=domain) + connection.session.cookies.set(connection.session_cookie_name, session_id, domain=domain) else: connection.session.auth = previous_auth diff --git a/awxkit/awxkit/ws.py b/awxkit/awxkit/ws.py index d56fccf719..b2b51fefba 100644 --- a/awxkit/awxkit/ws.py +++ b/awxkit/awxkit/ws.py @@ -51,7 +51,9 @@ class WSClient(object): # Subscription group types - def __init__(self, token=None, hostname='', port=443, secure=True, session_id=None, csrftoken=None, add_received_time=False): + def __init__( + self, token=None, hostname='', port=443, secure=True, session_id=None, csrftoken=None, add_received_time=False, session_cookie_name='awx_sessionid' + ): # delay this import, because this is an optional dependency import websocket @@ -78,7 +80,7 @@ class WSClient(object): if self.token is not None: auth_cookie = 'token="{0.token}";'.format(self) elif self.session_id is not None: - auth_cookie = 'sessionid="{0.session_id}"'.format(self) + auth_cookie = '{1}="{0.session_id}"'.format(self, session_cookie_name) if self.csrftoken: auth_cookie += ';csrftoken={0.csrftoken}'.format(self) else: diff --git a/docs/auth/session.md b/docs/auth/session.md index f5a3d3888f..df1248ae3f 100644 --- a/docs/auth/session.md +++ b/docs/auth/session.md @@ -6,9 +6,9 @@ Session authentication is a safer way of utilizing 
HTTP(S) cookies. Theoreticall `Cookie` header, but this method is vulnerable to cookie hijacks, where crackers can see and steal user information from the cookie payload. -Session authentication, on the other hand, sets a single `session_id` cookie. The `session_id` -is *a random string which will be mapped to user authentication informations by server*. Crackers who -hijack cookies will only get the `session_id` itself, which does not imply any critical user info, is valid only for +Session authentication, on the other hand, sets a single `awx_sessionid` cookie. The `awx_sessionid` +is *a random string which will be mapped to user authentication information by the server*. Crackers who +hijack cookies will only get the `awx_sessionid` itself, which does not imply any critical user info, is valid only for a limited time, and can be revoked at any time. > Note: The CSRF token will by default allow HTTP. To increase security, the `CSRF_COOKIE_SECURE` setting should @@ -34,22 +34,27 @@ be provided in the form: * `next`: The path of the redirect destination, in API browser `"/api/"` is used. * `csrfmiddlewaretoken`: The CSRF token, usually populated by using Django template `{% csrf_token %}`. -The `session_id` is provided as a return `Set-Cookie` header. Here is a typical one: +The `awx_session_id` is provided as a return `Set-Cookie` header. Here is a typical one: ``` -Set-Cookie: sessionid=lwan8l5ynhrqvps280rg5upp7n3yp6ds; expires=Tue, 21-Nov-2017 16:33:13 GMT; httponly; Max-Age=1209600; Path=/ +Set-Cookie: awx_sessionid=lwan8l5ynhrqvps280rg5upp7n3yp6ds; expires=Tue, 21-Nov-2017 16:33:13 GMT; httponly; Max-Age=1209600; Path=/ ``` + +In addition, when the `awx_sessionid` cookie is set, a header called `X-API-Session-Cookie-Name` is also returned. This header will only be displayed once, on a successful login, and denotes the name of the session cookie. 
+ Any client should follow the standard rules of [cookie protocol](https://tools.ietf.org/html/rfc6265) to -parse that header to obtain information about the session, such as session cookie name (`session_id`), +parse that header to obtain information about the session, such as session cookie name (`awx_sessionid`), session cookie value, expiration date, duration, etc. +The name of the cookie is configurable by Tower Configuration setting `SESSION_COOKIE_NAME` under the category `authentication`. It is a string. The default session cookie name is `awx_sessionid`. + The duration of the cookie is configurable by Tower Configuration setting `SESSION_COOKIE_AGE` under category `authentication`. It is an integer denoting the number of seconds the session cookie should live. The default session cookie age is two weeks. -After a valid session is acquired, a client should provide the `session_id` as a cookie for subsequent requests +After a valid session is acquired, a client should provide the `awx_sessionid` as a cookie for subsequent requests in order to be authenticated. For example: ``` -Cookie: sessionid=lwan8l5ynhrqvps280rg5upp7n3yp6ds; ... +Cookie: awx_sessionid=lwan8l5ynhrqvps280rg5upp7n3yp6ds; ... ``` User should use the `/api/logout/` endpoint to log out. 
In the API browser, a logged-in user can do that by From eb520956706df22f86a0c2ed7c43f311f6cbbfcf Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Mon, 28 Feb 2022 08:38:01 -0500 Subject: [PATCH 054/125] Fix bug where translated strings will cause log error to error (#11813) * Fix bug where translated strings will cause log error to error * Use force_str for ensuring string --- awx/main/utils/handlers.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/awx/main/utils/handlers.py b/awx/main/utils/handlers.py index ef761159ed..c6a2b3b596 100644 --- a/awx/main/utils/handlers.py +++ b/awx/main/utils/handlers.py @@ -10,6 +10,7 @@ from datetime import datetime # Django from django.conf import settings from django.utils.timezone import now +from django.utils.encoding import force_str # AWX from awx.main.exceptions import PostRunError @@ -42,7 +43,7 @@ class RSysLogHandler(logging.handlers.SysLogHandler): msg += exc.splitlines()[-1] except Exception: msg += exc - msg = '\n'.join([msg, record.msg, '']) + msg = '\n'.join([msg, force_str(record.msg), '']) # force_str used in case of translated strings sys.stderr.write(msg) def emit(self, msg): From 5e8107621eea7e556944db0688f909c93547f7ad Mon Sep 17 00:00:00 2001 From: Marcelo Moreira de Mello Date: Mon, 24 Jan 2022 15:44:43 -0500 Subject: [PATCH 055/125] Allow isolated paths as hostPath volume @ k8s/ocp/container groups --- awx/main/conf.py | 13 ++++++ awx/main/constants.py | 3 +- awx/main/tasks/jobs.py | 8 ++++ awx/main/tasks/receptor.py | 44 +++++++++++++++++++ awx/settings/defaults.py | 3 ++ .../screens/Setting/Jobs/JobsEdit/JobsEdit.js | 4 ++ .../JobsEdit/data.defaultJobSettings.json | 1 + 7 files changed, 75 insertions(+), 1 deletion(-) diff --git a/awx/main/conf.py b/awx/main/conf.py index 6756347b54..c754ecc92a 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -334,6 +334,19 @@ register( category_slug='jobs', ) +register( + 'AWX_MOUNT_ISOLATED_PATHS_ON_K8S', + 
field_class=fields.BooleanField, + default=False, + label=_('Expose host paths for Container Groups'), + help_text=_( + 'Expose paths via hostPath for the Pods created by a Container Group. ' + 'HostPath volumes present many security risks, and it is a best practice to avoid the use of HostPaths when possible. ' + ), + category=_('Jobs'), + category_slug='jobs', +) + register( 'GALAXY_IGNORE_CERTS', field_class=fields.BooleanField, diff --git a/awx/main/constants.py b/awx/main/constants.py index 36209c3334..9074d9bd7f 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -88,7 +88,8 @@ JOB_FOLDER_PREFIX = 'awx_%s_' # :z option tells Podman that two containers share the volume content with r/w # :O option tells Podman to mount the directory from the host as a temporary storage using the overlay file system. +# :ro or :rw option to mount a volume in read-only or read-write mode, respectively. By default, the volumes are mounted read-write. # see podman-run manpage for further details # /HOST-DIR:/CONTAINER-DIR:OPTIONS -CONTAINER_VOLUMES_MOUNT_TYPES = ['z', 'O'] +CONTAINER_VOLUMES_MOUNT_TYPES = ['z', 'O', 'ro', 'rw'] MAX_ISOLATED_PATH_COLON_DELIMITER = 2 diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index 7dea383014..54ce9b2f3d 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -40,6 +40,7 @@ from awx.main.constants import ( STANDARD_INVENTORY_UPDATE_ENV, JOB_FOLDER_PREFIX, MAX_ISOLATED_PATH_COLON_DELIMITER, + CONTAINER_VOLUMES_MOUNT_TYPES, ) from awx.main.models import ( Instance, @@ -164,6 +165,13 @@ class BaseTask(object): # Uppercase Z restricts access (in weird ways) to 1 container at a time if this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER: src, dest, scontext = this_path.split(':') + + # scontext validation via performed via API, but since this can be overriden via settings.py + # let's ensure scontext is one that we support + if scontext not in CONTAINER_VOLUMES_MOUNT_TYPES: + scontext = 'z' + 
logger.warn(f'The path {this_path} has volume mount type {scontext} which is not supported. Using "z" instead.') + params['container_volume_mounts'].append(f'{src}:{dest}:{scontext}') elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: src, dest = this_path.split(':') diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 4cb0a543a2..2bdc0223ea 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -26,6 +26,8 @@ from awx.main.utils.common import ( parse_yaml_or_json, cleanup_new_process, ) +from awx.main.constants import MAX_ISOLATED_PATH_COLON_DELIMITER + # Receptorctl from receptorctl.socket_interface import ReceptorControl @@ -488,6 +490,48 @@ class AWXReceptorJob: if self.task.instance.execution_environment.pull: pod_spec['spec']['containers'][0]['imagePullPolicy'] = pull_options[self.task.instance.execution_environment.pull] + # This allows the user to also expose the isolated path list + # to EEs running in k8s/ocp environments, i.e. container groups. + # This assumes the node and SA supports hostPath volumes + # type is not passed due to backward compatibility, + # which means that no checks will be performed before mounting the hostPath volume. 
+ if settings.AWX_MOUNT_ISOLATED_PATHS_ON_K8S and settings.AWX_ISOLATION_SHOW_PATHS: + spec_volume_mounts = [] + spec_volumes = [] + + for idx, this_path in enumerate(settings.AWX_ISOLATION_SHOW_PATHS): + scontext = None + if this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER: + src, dest, scontext = this_path.split(':') + elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: + src, dest = this_path.split(':') + else: + src = dest = this_path + + # Enforce read-only volume if 'ro' has been explicitly passed + # We do this so we can use the same configuration for regular scenarios and k8s + # Since flags like ':O', ':z' or ':Z' are not valid in the k8s realm + # Example: /data:/data:ro + read_only = bool('ro' == scontext) + + # Since type is not being passed, k8s by default will not perform any checks if the + # hostPath volume exists on the k8s node itself. + spec_volumes.append({'name': f'volume-{idx}', 'hostPath': {'path': src}}) + + spec_volume_mounts.append({'name': f'volume-{idx}', 'mountPath': f'{dest}', 'readOnly': read_only}) + + # merge any volumes definition already present in the pod_spec + if 'volumes' in pod_spec['spec']: + pod_spec['spec']['volumes'] += spec_volumes + else: + pod_spec['spec']['volumes'] = spec_volumes + + # merge any volumesMounts definition already present in the pod_spec + if 'volumeMounts' in pod_spec['spec']['containers'][0]: + pod_spec['spec']['containers'][0]['volumeMounts'] += spec_volume_mounts + else: + pod_spec['spec']['containers'][0]['volumeMounts'] = spec_volume_mounts + if self.task and self.task.instance.is_container_group_task: # If EE credential is passed, create an imagePullSecret if self.task.instance.execution_environment and self.task.instance.execution_environment.credential: diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 9d0078916d..bc3c2549c3 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -1001,3 +1001,6 @@ DEFAULT_CONTROL_PLANE_QUEUE_NAME = 
'controlplane' # For example, to disable SELinux in containers for podman # DEFAULT_CONTAINER_RUN_OPTIONS = ['--security-opt', 'label=disable'] DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true'] + +# Mount exposed paths as hostPath resource in k8s/ocp +AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False diff --git a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js index fec8d6cdb8..22066243b7 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/JobsEdit.js @@ -212,6 +212,10 @@ function JobsEdit() { name="AWX_ISOLATION_SHOW_PATHS" config={jobs.AWX_ISOLATION_SHOW_PATHS} /> + {submitError && } {revertError && } diff --git a/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json b/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json index 749249494a..9736f3794b 100644 --- a/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json +++ b/awx/ui/src/screens/Setting/Jobs/JobsEdit/data.defaultJobSettings.json @@ -27,6 +27,7 @@ "AWX_ISOLATION_SHOW_PATHS": [], "AWX_ROLES_ENABLED": true, "AWX_SHOW_PLAYBOOK_LINKS": false, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": false, "AWX_TASK_ENV": {}, "DEFAULT_INVENTORY_UPDATE_TIMEOUT": 0, "DEFAULT_JOB_TIMEOUT": 0, From 169da866f3561434bbd3b21fad0dac3c82365cff Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Tue, 1 Feb 2022 15:47:10 -0500 Subject: [PATCH 056/125] Add UI unit tests to job settings --- .../Setting/Jobs/JobsDetail/JobsDetail.test.js | 1 + .../Setting/shared/data.allSettingOptions.json | 17 +++++++++++++++++ .../Setting/shared/data.allSettings.json | 3 ++- .../Setting/shared/data.jobSettings.json | 3 ++- 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js b/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js index 9b0dda4233..97110e3169 100644 --- 
a/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js +++ b/awx/ui/src/screens/Setting/Jobs/JobsDetail/JobsDetail.test.js @@ -69,6 +69,7 @@ describe('', () => { assertDetail(wrapper, 'Default Project Update Timeout', '0 seconds'); assertDetail(wrapper, 'Per-Host Ansible Fact Cache Timeout', '0 seconds'); assertDetail(wrapper, 'Maximum number of forks per job', '200'); + assertDetail(wrapper, 'Expose host paths for Container Groups', 'Off'); assertVariableDetail( wrapper, 'Ansible Modules Allowed for Ad Hoc Jobs', diff --git a/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json b/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json index 21cdae90c6..ab0bc3f8e1 100644 --- a/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json +++ b/awx/ui/src/screens/Setting/shared/data.allSettingOptions.json @@ -276,6 +276,15 @@ "category_slug": "jobs", "default": false }, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": { + "type": "boolean", + "required": false, + "label": "Expose host paths for Container Groups", + "help_text": "Expose paths via hostPath for the Pods created by a Container Group. HostPath volumes present many security risks, and it is a best practice to avoid the use of HostPaths when possible. ", + "category": "Jobs", + "category_slug": "jobs", + "default": false + }, "GALAXY_IGNORE_CERTS": { "type": "boolean", "required": false, @@ -3973,6 +3982,14 @@ "category_slug": "jobs", "defined_in_file": false }, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": { + "type": "boolean", + "label": "Expose host paths for Container Groups", + "help_text": "Expose paths via hostPath for the Pods created by a Container Group. HostPath volumes present many security risks, and it is a best practice to avoid the use of HostPaths when possible. 
", + "category": "Jobs", + "category_slug": "jobs", + "defined_in_file": false + }, "GALAXY_IGNORE_CERTS": { "type": "boolean", "label": "Ignore Ansible Galaxy SSL Certificate Verification", diff --git a/awx/ui/src/screens/Setting/shared/data.allSettings.json b/awx/ui/src/screens/Setting/shared/data.allSettings.json index 4715c4e03e..555713c239 100644 --- a/awx/ui/src/screens/Setting/shared/data.allSettings.json +++ b/awx/ui/src/screens/Setting/shared/data.allSettings.json @@ -297,5 +297,6 @@ "users":{"fields":["username"],"adj_list":[]}, "instances":{"fields":["hostname"],"adj_list":[]} }, - "DEFAULT_EXECUTION_ENVIRONMENT": 1 + "DEFAULT_EXECUTION_ENVIRONMENT": 1, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": false } diff --git a/awx/ui/src/screens/Setting/shared/data.jobSettings.json b/awx/ui/src/screens/Setting/shared/data.jobSettings.json index 1815cc12b7..e24eedb36d 100644 --- a/awx/ui/src/screens/Setting/shared/data.jobSettings.json +++ b/awx/ui/src/screens/Setting/shared/data.jobSettings.json @@ -21,5 +21,6 @@ "DEFAULT_INVENTORY_UPDATE_TIMEOUT": 0, "DEFAULT_PROJECT_UPDATE_TIMEOUT": 0, "ANSIBLE_FACT_CACHE_TIMEOUT": 0, - "MAX_FORKS": 200 + "MAX_FORKS": 200, + "AWX_MOUNT_ISOLATED_PATHS_ON_K8S": false } From 864514729287f502dedf9153cbf946c53ba742c0 Mon Sep 17 00:00:00 2001 From: Marcelo Moreira de Mello Date: Fri, 4 Feb 2022 12:45:02 -0500 Subject: [PATCH 057/125] Renamed scontext variable to mount_options --- awx/main/tasks/jobs.py | 13 ++++++------- awx/main/tasks/receptor.py | 6 +++--- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index 54ce9b2f3d..23bf8faa68 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -164,15 +164,14 @@ class BaseTask(object): # Using z allows the dir to be mounted by multiple containers # Uppercase Z restricts access (in weird ways) to 1 container at a time if this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER: - src, dest, scontext = this_path.split(':') 
+ src, dest, mount_option = this_path.split(':') - # scontext validation via performed via API, but since this can be overriden via settings.py - # let's ensure scontext is one that we support - if scontext not in CONTAINER_VOLUMES_MOUNT_TYPES: - scontext = 'z' - logger.warn(f'The path {this_path} has volume mount type {scontext} which is not supported. Using "z" instead.') + # mount_option validation via performed via API, but since this can be overriden via settings.py + if mount_option not in CONTAINER_VOLUMES_MOUNT_TYPES: + mount_option = 'z' + logger.warn(f'The path {this_path} has volume mount type {mount_option} which is not supported. Using "z" instead.') - params['container_volume_mounts'].append(f'{src}:{dest}:{scontext}') + params['container_volume_mounts'].append(f'{src}:{dest}:{mount_option}') elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: src, dest = this_path.split(':') params['container_volume_mounts'].append(f'{src}:{dest}:z') diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 2bdc0223ea..278f4dbbc6 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -500,9 +500,9 @@ class AWXReceptorJob: spec_volumes = [] for idx, this_path in enumerate(settings.AWX_ISOLATION_SHOW_PATHS): - scontext = None + mount_option = None if this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER: - src, dest, scontext = this_path.split(':') + src, dest, mount_option = this_path.split(':') elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: src, dest = this_path.split(':') else: @@ -512,7 +512,7 @@ class AWXReceptorJob: # We do this so we can use the same configuration for regular scenarios and k8s # Since flags like ':O', ':z' or ':Z' are not valid in the k8s realm # Example: /data:/data:ro - read_only = bool('ro' == scontext) + read_only = bool('ro' == mount_option) # Since type is not being passed, k8s by default will not perform any checks if the # hostPath volume exists on the k8s 
node itself. From 2df3ca547bdda44a2e247237a4b13b1c19a8b6fe Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Mon, 28 Feb 2022 08:28:36 -0500 Subject: [PATCH 058/125] Reimplement transmitter thread as future This avoids the need for an explicit `.join()`, and removes the need for the TransmitterThread wrapper class. --- awx/main/tasks/receptor.py | 76 +++++++++++++++++--------------------- 1 file changed, 33 insertions(+), 43 deletions(-) diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 4cb0a543a2..0321a50c80 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -7,8 +7,6 @@ import logging import os import shutil import socket -import sys -import threading import time import yaml @@ -247,16 +245,6 @@ def worker_cleanup(node_name, vargs, timeout=300.0): return stdout -class TransmitterThread(threading.Thread): - def run(self): - self.exc = None - - try: - super().run() - except Exception: - self.exc = sys.exc_info() - - class AWXReceptorJob: def __init__(self, task, runner_params=None): self.task = task @@ -296,41 +284,43 @@ class AWXReceptorJob: # reading. sockin, sockout = socket.socketpair() - transmitter_thread = TransmitterThread(target=self.transmit, args=[sockin]) - transmitter_thread.start() + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + transmitter_future = executor.submit(self.transmit, sockin) - # submit our work, passing - # in the right side of our socketpair for reading. 
- _kw = {} - if self.work_type == 'ansible-runner': - _kw['node'] = self.task.instance.execution_node - use_stream_tls = get_conn_type(_kw['node'], receptor_ctl).name == "STREAMTLS" - _kw['tlsclient'] = get_tls_client(use_stream_tls) - result = receptor_ctl.submit_work(worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params, signwork=self.sign_work, **_kw) - self.unit_id = result['unitid'] - # Update the job with the work unit in-memory so that the log_lifecycle - # will print out the work unit that is to be associated with the job in the database - # via the update_model() call. - # We want to log the work_unit_id as early as possible. A failure can happen in between - # when we start the job in receptor and when we associate the job <-> work_unit_id. - # In that case, there will be work running in receptor and Controller will not know - # which Job it is associated with. - # We do not programatically handle this case. Ideally, we would handle this with a reaper case. - # The two distinct job lifecycle log events below allow for us to at least detect when this - # edge case occurs. If the lifecycle event work_unit_id_received occurs without the - # work_unit_id_assigned event then this case may have occured. - self.task.instance.work_unit_id = result['unitid'] # Set work_unit_id in-memory only - self.task.instance.log_lifecycle("work_unit_id_received") - self.task.update_model(self.task.instance.pk, work_unit_id=result['unitid']) - self.task.instance.log_lifecycle("work_unit_id_assigned") + _kw = {} + if self.work_type == 'ansible-runner': + _kw['node'] = self.task.instance.execution_node + use_stream_tls = get_conn_type(_kw['node'], receptor_ctl).name == "STREAMTLS" + _kw['tlsclient'] = get_tls_client(use_stream_tls) - sockin.close() - sockout.close() + # submit our work, passing in the right side of our socketpair for reading. 
+ result = receptor_ctl.submit_work( + worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params, signwork=self.sign_work, **_kw + ) - if transmitter_thread.exc: - raise transmitter_thread.exc[1].with_traceback(transmitter_thread.exc[2]) + sockin.close() + sockout.close() - transmitter_thread.join() + self.unit_id = result['unitid'] + # Update the job with the work unit in-memory so that the log_lifecycle + # will print out the work unit that is to be associated with the job in the database + # via the update_model() call. + # We want to log the work_unit_id as early as possible. A failure can happen in between + # when we start the job in receptor and when we associate the job <-> work_unit_id. + # In that case, there will be work running in receptor and Controller will not know + # which Job it is associated with. + # We do not programatically handle this case. Ideally, we would handle this with a reaper case. + # The two distinct job lifecycle log events below allow for us to at least detect when this + # edge case occurs. If the lifecycle event work_unit_id_received occurs without the + # work_unit_id_assigned event then this case may have occured. + self.task.instance.work_unit_id = result['unitid'] # Set work_unit_id in-memory only + self.task.instance.log_lifecycle("work_unit_id_received") + self.task.update_model(self.task.instance.pk, work_unit_id=result['unitid']) + self.task.instance.log_lifecycle("work_unit_id_assigned") + + # Throws an exception if the transmit failed. + # Will be caught by the try/except in BaseTask#run. 
+ transmitter_future.result() # Artifacts are an output, but sometimes they are an input as well # this is the case with fact cache, where clearing facts deletes a file, and this must be captured From 8c21a2aa9e5f94b5837e3c6d22fa65c463979c44 Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Tue, 22 Feb 2022 15:32:38 -0500 Subject: [PATCH 059/125] Add schedule detail to job list expanded view --- .../components/DetailList/LaunchedByDetail.js | 39 +------ awx/ui/src/components/JobList/JobListItem.js | 12 ++ .../components/JobList/JobListItem.test.js | 13 +++ .../screens/Job/JobDetail/JobDetail.test.js | 1 + awx/ui/src/util/getScheduleUrl.js | 32 ++++++ awx/ui/src/util/getScheduleUrl.test.js | 103 ++++++++++++++++++ 6 files changed, 166 insertions(+), 34 deletions(-) create mode 100644 awx/ui/src/util/getScheduleUrl.js create mode 100644 awx/ui/src/util/getScheduleUrl.test.js diff --git a/awx/ui/src/components/DetailList/LaunchedByDetail.js b/awx/ui/src/components/DetailList/LaunchedByDetail.js index 099d2385a7..6f542231c5 100644 --- a/awx/ui/src/components/DetailList/LaunchedByDetail.js +++ b/awx/ui/src/components/DetailList/LaunchedByDetail.js @@ -1,45 +1,16 @@ import React from 'react'; import { Link } from 'react-router-dom'; import { t } from '@lingui/macro'; +import getScheduleUrl from 'util/getScheduleUrl'; import Detail from './Detail'; -function getScheduleURL(template, scheduleId, inventoryId = null) { - let scheduleUrl; - - switch (template.unified_job_type) { - case 'inventory_update': - scheduleUrl = - inventoryId && - `/inventories/inventory/${inventoryId}/sources/${template.id}/schedules/${scheduleId}/details`; - break; - case 'job': - scheduleUrl = `/templates/job_template/${template.id}/schedules/${scheduleId}/details`; - break; - case 'project_update': - scheduleUrl = `/projects/${template.id}/schedules/${scheduleId}/details`; - break; - case 'system_job': - scheduleUrl = `/management_jobs/${template.id}/schedules/${scheduleId}/details`; - break; - 
case 'workflow_job': - scheduleUrl = `/templates/workflow_job_template/${template.id}/schedules/${scheduleId}/details`; - break; - default: - break; - } - - return scheduleUrl; -} - -const getLaunchedByDetails = ({ summary_fields = {}, launch_type }) => { +const getLaunchedByDetails = (job) => { const { created_by: createdBy, job_template: jobTemplate, - unified_job_template: unifiedJT, workflow_job_template: workflowJT, - inventory, schedule, - } = summary_fields; + } = job.summary_fields; if (!createdBy && !schedule) { return {}; @@ -48,7 +19,7 @@ const getLaunchedByDetails = ({ summary_fields = {}, launch_type }) => { let link; let value; - switch (launch_type) { + switch (job.launch_type) { case 'webhook': value = t`Webhook`; link = @@ -58,7 +29,7 @@ const getLaunchedByDetails = ({ summary_fields = {}, launch_type }) => { break; case 'scheduled': value = schedule.name; - link = getScheduleURL(unifiedJT, schedule.id, inventory?.id); + link = getScheduleUrl(job); break; case 'manual': link = `/users/${createdBy.id}/details`; diff --git a/awx/ui/src/components/JobList/JobListItem.js b/awx/ui/src/components/JobList/JobListItem.js index 9967b1b154..93f398c741 100644 --- a/awx/ui/src/components/JobList/JobListItem.js +++ b/awx/ui/src/components/JobList/JobListItem.js @@ -8,6 +8,7 @@ import { RocketIcon } from '@patternfly/react-icons'; import styled from 'styled-components'; import { formatDateString } from 'util/dates'; import { isJobRunning } from 'util/jobs'; +import getScheduleUrl from 'util/getScheduleUrl'; import { ActionsTd, ActionItem, TdBreakWord } from '../PaginatedTable'; import { LaunchButton, ReLaunchDropDown } from '../LaunchButton'; import StatusLabel from '../StatusLabel'; @@ -167,6 +168,17 @@ function JobListItem({ /> )} + {job.launch_type === 'scheduled' && ( + + {job.summary_fields.schedule.name} + + } + /> + )} {job_template && ( ', () => { test('initially renders successfully', () => { expect(wrapper.find('JobListItem').length).toBe(1); + }); + + 
test('should display expected details', () => { assertDetail('Job Slice', '1/3'); + assertDetail('Schedule', 'mock schedule'); }); test('launch button shown to users with launch capabilities', () => { diff --git a/awx/ui/src/screens/Job/JobDetail/JobDetail.test.js b/awx/ui/src/screens/Job/JobDetail/JobDetail.test.js index da8b7048fe..33487f687d 100644 --- a/awx/ui/src/screens/Job/JobDetail/JobDetail.test.js +++ b/awx/ui/src/screens/Job/JobDetail/JobDetail.test.js @@ -189,6 +189,7 @@ describe('', () => { { + test('should return expected schedule URL for inventory update job', () => { + const invSrcJob = { + type: 'inventory_update', + summary_fields: { + inventory: { + id: 1, + name: 'mock inv', + }, + schedule: { + name: 'mock schedule', + id: 3, + }, + unified_job_template: { + unified_job_type: 'inventory_update', + name: 'mock inv src', + id: 2, + }, + }, + }; + expect(getScheduleUrl(invSrcJob)).toEqual( + '/inventories/inventory/1/sources/2/schedules/3/details' + ); + }); + test('should return expected schedule URL for job', () => { + const templateJob = { + type: 'job', + summary_fields: { + schedule: { + name: 'mock schedule', + id: 5, + }, + unified_job_template: { + unified_job_type: 'job', + name: 'mock job', + id: 4, + }, + }, + }; + expect(getScheduleUrl(templateJob)).toEqual( + '/templates/job_template/4/schedules/5/details' + ); + }); + test('should return expected schedule URL for project update job', () => { + const projectUpdateJob = { + type: 'project_update', + summary_fields: { + schedule: { + name: 'mock schedule', + id: 7, + }, + unified_job_template: { + unified_job_type: 'project_update', + name: 'mock job', + id: 6, + }, + }, + }; + expect(getScheduleUrl(projectUpdateJob)).toEqual( + '/projects/6/schedules/7/details' + ); + }); + test('should return expected schedule URL for system job', () => { + const systemJob = { + type: 'system_job', + summary_fields: { + schedule: { + name: 'mock schedule', + id: 10, + }, + unified_job_template: { + 
unified_job_type: 'system_job', + name: 'mock job', + id: 9, + }, + }, + }; + expect(getScheduleUrl(systemJob)).toEqual( + '/management_jobs/9/schedules/10/details' + ); + }); + test('should return expected schedule URL for workflow job', () => { + const workflowJob = { + type: 'workflow_job', + summary_fields: { + schedule: { + name: 'mock schedule', + id: 12, + }, + unified_job_template: { + unified_job_type: 'job', + name: 'mock job', + id: 11, + }, + }, + }; + expect(getScheduleUrl(workflowJob)).toEqual( + '/templates/workflow_job_template/11/schedules/12/details' + ); + }); +}); From 1aefd39782874c24c9e370a17e081c538381dde2 Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Mon, 28 Feb 2022 15:51:36 -0500 Subject: [PATCH 060/125] Show deleted detail for deleted schedules --- awx/ui/src/components/JobList/JobListItem.js | 31 ++++++++++++------- .../components/JobList/JobListItem.test.js | 19 ++++++++++++ 2 files changed, 38 insertions(+), 12 deletions(-) diff --git a/awx/ui/src/components/JobList/JobListItem.js b/awx/ui/src/components/JobList/JobListItem.js index 93f398c741..364b4279c0 100644 --- a/awx/ui/src/components/JobList/JobListItem.js +++ b/awx/ui/src/components/JobList/JobListItem.js @@ -12,7 +12,12 @@ import getScheduleUrl from 'util/getScheduleUrl'; import { ActionsTd, ActionItem, TdBreakWord } from '../PaginatedTable'; import { LaunchButton, ReLaunchDropDown } from '../LaunchButton'; import StatusLabel from '../StatusLabel'; -import { DetailList, Detail, LaunchedByDetail } from '../DetailList'; +import { + DetailList, + Detail, + DeletedDetail, + LaunchedByDetail, +} from '../DetailList'; import ChipGroup from '../ChipGroup'; import CredentialChip from '../CredentialChip'; import ExecutionEnvironmentDetail from '../ExecutionEnvironmentDetail'; @@ -49,6 +54,7 @@ function JobListItem({ job_template, labels, project, + schedule, source_workflow_job, workflow_job_template, } = job.summary_fields; @@ -168,17 +174,18 @@ function JobListItem({ /> )} - 
{job.launch_type === 'scheduled' && ( - - {job.summary_fields.schedule.name} - - } - /> - )} + {job.launch_type === 'scheduled' && + (schedule ? ( + {schedule.name} + } + /> + ) : ( + + ))} {job_template && ( ', () => { expect(wrapper.find('Td[dataLabel="Type"]').length).toBe(1); }); + test('should not show schedule detail in expanded view', () => { + wrapper = mountWithContexts( + + + {}} + /> + +
+ ); + expect(wrapper.find('Detail[label="Schedule"] dt').length).toBe(1); + }); + test('should not display EE for canceled jobs', () => { wrapper = mountWithContexts( From f6e18bbf0600bab48a40da65179ca53cd6e896ec Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Mon, 28 Feb 2022 16:29:25 -0500 Subject: [PATCH 061/125] Publish to galaxy and pypi in promote workflow --- .github/workflows/promote.yml | 47 +++++++++++++++++++++++++++++++++++ Makefile | 7 +++++- 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/.github/workflows/promote.yml b/.github/workflows/promote.yml index 3955d7fcd7..f21bc69672 100644 --- a/.github/workflows/promote.yml +++ b/.github/workflows/promote.yml @@ -8,6 +8,53 @@ jobs: promote: runs-on: ubuntu-latest steps: + - name: Checkout awx + uses: actions/checkout@v2 + + - name: Get python version from Makefile + run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV + + - name: Install python ${{ env.py_version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env.py_version }} + + - name: Install dependencies + run: | + python${{ env.py_version }} -m pip install wheel twine + + - name: Set official collection namespace + run: echo collection_namespace=awx >> $GITHUB_ENV + if: ${{ github.repository_owner == 'ansible' }} + + - name: Set unofficial collection namespace + run: echo collection_namespace=${{ github.repository_owner }} >> $GITHUB_ENV + if: ${{ github.repository_owner != 'ansible' }} + + - name: Build collection and publish to galaxy + run: | + COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection + ansible-galaxy collection publish \ + --token=${{ secrets.GALAXY_TOKEN }} \ + awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz + + - name: Set official pypi info + run: echo pypi_repo=pypi >> $GITHUB_ENV + if: ${{ github.repository_owner == 'ansible' }} + + - name: Set unofficial pypi info + run: echo pypi_repo=testpypi >> $GITHUB_ENV + 
if: ${{ github.repository_owner != 'ansible' }} + + - name: Build awxkit and upload to pypi + run: | + cd awxkit && python3 setup.py bdist_wheel + twine upload \ + -r ${{ env.pypi_repo }} \ + -u ${{ secrets.PYPI_USERNAME }} \ + -p ${{ secrets.PYPI_PASSWORD }} \ + dist/* + - name: Log in to GHCR run: | echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin diff --git a/Makefile b/Makefile index 6f4680399d..839851f9e2 100644 --- a/Makefile +++ b/Makefile @@ -305,7 +305,7 @@ symlink_collection: mkdir -p ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE) # in case it does not exist ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL) -build_collection: +awx_collection_build: $(shell find awx_collection -type f) ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \ -e collection_package=$(COLLECTION_PACKAGE) \ -e collection_namespace=$(COLLECTION_NAMESPACE) \ @@ -313,6 +313,8 @@ build_collection: -e '{"awx_template_version":false}' ansible-galaxy collection build awx_collection_build --force --output-path=awx_collection_build +build_collection: awx_collection_build + install_collection: build_collection rm -rf $(COLLECTION_INSTALL) ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz @@ -567,3 +569,6 @@ messages: . 
$(VENV_BASE)/awx/bin/activate; \ fi; \ $(PYTHON) manage.py makemessages -l $(LANG) --keep-pot + +print-%: + @echo $($*) From 4f505486e39945fb4e253b6d51d73847b819a29a Mon Sep 17 00:00:00 2001 From: Keith Grant Date: Tue, 1 Mar 2022 12:59:24 -0800 Subject: [PATCH 062/125] Add Toast messages when resources are copied (#11758) * create useToast hook * add copy success toast message to credentials/inventories * add Toast tests * add copy success toast to template/ee/project lists * move Toast type to types.js --- .../components/TemplateList/TemplateList.js | 17 ++ .../TemplateList/TemplateListItem.js | 11 +- awx/ui/src/hooks/useToast.js | 64 +++++ awx/ui/src/hooks/useToast.test.js | 124 ++++++++ .../CredentialList/CredentialList.js | 195 +++++++------ .../CredentialList/CredentialListItem.js | 9 +- .../ExecutionEnvironmentList.js | 16 ++ .../ExecutionEnvironmentListItem.js | 15 +- .../Inventory/InventoryList/InventoryList.js | 264 ++++++++++-------- .../InventoryList/InventoryListItem.js | 8 +- .../NotificationTemplateList.js | 89 +++--- .../Project/ProjectList/ProjectList.js | 16 ++ .../Project/ProjectList/ProjectListItem.js | 8 +- awx/ui/src/types.js | 9 + 14 files changed, 567 insertions(+), 278 deletions(-) create mode 100644 awx/ui/src/hooks/useToast.js create mode 100644 awx/ui/src/hooks/useToast.test.js diff --git a/awx/ui/src/components/TemplateList/TemplateList.js b/awx/ui/src/components/TemplateList/TemplateList.js index 01fef24065..9e9997dd63 100644 --- a/awx/ui/src/components/TemplateList/TemplateList.js +++ b/awx/ui/src/components/TemplateList/TemplateList.js @@ -12,6 +12,7 @@ import useSelected from 'hooks/useSelected'; import useExpanded from 'hooks/useExpanded'; import { getQSConfig, parseQueryString } from 'util/qs'; import useWsTemplates from 'hooks/useWsTemplates'; +import useToast, { AlertVariant } from 'hooks/useToast'; import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails'; import AlertModal from '../AlertModal'; 
import DatalistToolbar from '../DataListToolbar'; @@ -41,6 +42,8 @@ function TemplateList({ defaultParams }) { ); const location = useLocation(); + const { addToast, Toast, toastProps } = useToast(); + const { result: { results, @@ -123,6 +126,18 @@ function TemplateList({ defaultParams }) { } ); + const handleCopy = useCallback( + (newTemplateId) => { + addToast({ + id: newTemplateId, + title: t`Template copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleTemplateDelete = async () => { await deleteTemplates(); clearSelected(); @@ -266,6 +281,7 @@ function TemplateList({ defaultParams }) { onSelect={() => handleSelect(template)} isExpanded={expanded.some((row) => row.id === template.id)} onExpand={() => handleExpand(template)} + onCopy={handleCopy} isSelected={selected.some((row) => row.id === template.id)} fetchTemplates={fetchTemplates} rowIndex={index} @@ -274,6 +290,7 @@ function TemplateList({ defaultParams }) { emptyStateControls={(canAddJT || canAddWFJT) && addButton} /> + { + let response; if (template.type === 'job_template') { - await JobTemplatesAPI.copy(template.id, { + response = await JobTemplatesAPI.copy(template.id, { name: `${template.name} @ ${timeOfDay()}`, }); } else { - await WorkflowJobTemplatesAPI.copy(template.id, { + response = await WorkflowJobTemplatesAPI.copy(template.id, { name: `${template.name} @ ${timeOfDay()}`, }); } + if (response.status === 201) { + onCopy(response.data.id); + } await fetchTemplates(); - }, [fetchTemplates, template.id, template.name, template.type]); + }, [fetchTemplates, template.id, template.name, template.type, onCopy]); const handleCopyStart = useCallback(() => { setIsDisabled(true); diff --git a/awx/ui/src/hooks/useToast.js b/awx/ui/src/hooks/useToast.js new file mode 100644 index 0000000000..0f5ec1da61 --- /dev/null +++ b/awx/ui/src/hooks/useToast.js @@ -0,0 +1,64 @@ +import React, { useState, useCallback } from 'react'; +import { + 
AlertGroup, + Alert, + AlertActionCloseButton, + AlertVariant, +} from '@patternfly/react-core'; +import { arrayOf, func } from 'prop-types'; +import { Toast as ToastType } from 'types'; + +export default function useToast() { + const [toasts, setToasts] = useState([]); + + const addToast = useCallback((newToast) => { + setToasts((oldToasts) => [...oldToasts, newToast]); + }, []); + + const removeToast = useCallback((toastId) => { + setToasts((oldToasts) => oldToasts.filter((t) => t.id !== toastId)); + }, []); + + return { + addToast, + removeToast, + Toast, + toastProps: { + toasts, + removeToast, + }, + }; +} + +export function Toast({ toasts, removeToast }) { + if (!toasts.length) { + return null; + } + + return ( + + {toasts.map((toast) => ( + removeToast(toast.id)} /> + } + onTimeout={() => removeToast(toast.id)} + timeout={toast.hasTimeout} + title={toast.title} + variant={toast.variant} + key={`toast-message-${toast.id}`} + ouiaId={`toast-message-${toast.id}`} + > + {toast.message} + + ))} + + ); +} + +Toast.propTypes = { + toasts: arrayOf(ToastType).isRequired, + removeToast: func.isRequired, +}; + +export { AlertVariant }; diff --git a/awx/ui/src/hooks/useToast.test.js b/awx/ui/src/hooks/useToast.test.js new file mode 100644 index 0000000000..23b6ca845f --- /dev/null +++ b/awx/ui/src/hooks/useToast.test.js @@ -0,0 +1,124 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { shallow, mount } from 'enzyme'; +import useToast, { Toast, AlertVariant } from './useToast'; + +describe('useToast', () => { + const Child = () =>
; + const Test = () => { + const toastVals = useToast(); + return ; + }; + + test('should provide Toast component', () => { + const wrapper = mount(); + + expect(wrapper.find('Child').prop('Toast')).toEqual(Toast); + }); + + test('should add toast', () => { + const wrapper = mount(); + + expect(wrapper.find('Child').prop('toastProps').toasts).toEqual([]); + act(() => { + wrapper.find('Child').prop('addToast')({ + message: 'one', + id: 1, + variant: 'success', + }); + }); + wrapper.update(); + + expect(wrapper.find('Child').prop('toastProps').toasts).toEqual([ + { + message: 'one', + id: 1, + variant: 'success', + }, + ]); + }); + + test('should remove toast', () => { + const wrapper = mount(); + + act(() => { + wrapper.find('Child').prop('addToast')({ + message: 'one', + id: 1, + variant: 'success', + }); + }); + wrapper.update(); + expect(wrapper.find('Child').prop('toastProps').toasts).toHaveLength(1); + act(() => { + wrapper.find('Child').prop('removeToast')(1); + }); + wrapper.update(); + + expect(wrapper.find('Child').prop('toastProps').toasts).toHaveLength(0); + }); +}); + +describe('Toast', () => { + test('should render nothing with no toasts', () => { + const wrapper = shallow( {}} />); + expect(wrapper).toEqual({}); + }); + + test('should render toast alert', () => { + const toast = { + title: 'Inventory saved', + variant: AlertVariant.success, + id: 1, + message: 'the message', + }; + const wrapper = shallow( {}} />); + + const alert = wrapper.find('Alert'); + expect(alert.prop('title')).toEqual('Inventory saved'); + expect(alert.prop('variant')).toEqual('success'); + expect(alert.prop('ouiaId')).toEqual('toast-message-1'); + expect(alert.prop('children')).toEqual('the message'); + }); + + test('should call removeToast', () => { + const removeToast = jest.fn(); + const toast = { + title: 'Inventory saved', + variant: AlertVariant.success, + id: 1, + }; + const wrapper = shallow( + + ); + + const alert = wrapper.find('Alert'); + 
alert.prop('actionClose').props.onClose(1); + expect(removeToast).toHaveBeenCalledTimes(1); + }); + + test('should render multiple alerts', () => { + const toasts = [ + { + title: 'Inventory saved', + variant: AlertVariant.success, + id: 1, + message: 'the message', + }, + { + title: 'error saving', + variant: AlertVariant.danger, + id: 2, + }, + ]; + const wrapper = shallow( {}} />); + + const alert = wrapper.find('Alert'); + expect(alert).toHaveLength(2); + + expect(alert.at(0).prop('title')).toEqual('Inventory saved'); + expect(alert.at(0).prop('variant')).toEqual('success'); + expect(alert.at(1).prop('title')).toEqual('error saving'); + expect(alert.at(1).prop('variant')).toEqual('danger'); + }); +}); diff --git a/awx/ui/src/screens/Credential/CredentialList/CredentialList.js b/awx/ui/src/screens/Credential/CredentialList/CredentialList.js index ab8fa76004..c02b8d7047 100644 --- a/awx/ui/src/screens/Credential/CredentialList/CredentialList.js +++ b/awx/ui/src/screens/Credential/CredentialList/CredentialList.js @@ -4,6 +4,7 @@ import { t, Plural } from '@lingui/macro'; import { Card, PageSection } from '@patternfly/react-core'; import { CredentialsAPI } from 'api'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import AlertModal from 'components/AlertModal'; import ErrorDetail from 'components/ErrorDetail'; import DataListToolbar from 'components/DataListToolbar'; @@ -27,6 +28,8 @@ const QS_CONFIG = getQSConfig('credential', { function CredentialList() { const location = useLocation(); + const { addToast, Toast, toastProps } = useToast(); + const { result: { credentials, @@ -104,100 +107,116 @@ function CredentialList() { setSelected([]); }; + const handleCopy = useCallback( + (newCredentialId) => { + addToast({ + id: newCredentialId, + title: t`Credential copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const canAdd = actions && 
Object.prototype.hasOwnProperty.call(actions, 'POST'); const deleteDetailsRequests = relatedResourceDeleteRequests.credential( selected[0] ); return ( - - - - {t`Name`} - {t`Type`} - {t`Actions`} - - } - renderRow={(item, index) => ( - row.id === item.id)} - onSelect={() => handleSelect(item)} - rowIndex={index} - /> - )} - renderToolbar={(props) => ( - ] - : []), - - } - />, - ]} - /> - )} - /> - - - {t`Failed to delete one or more credentials.`} - - - + <> + + + + {t`Name`} + {t`Type`} + {t`Actions`} + + } + renderRow={(item, index) => ( + row.id === item.id)} + onSelect={() => handleSelect(item)} + onCopy={handleCopy} + rowIndex={index} + /> + )} + renderToolbar={(props) => ( + ] + : []), + + } + />, + ]} + /> + )} + /> + + + {t`Failed to delete one or more credentials.`} + + + + + ); } diff --git a/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js b/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js index ad54179832..83470149ae 100644 --- a/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js +++ b/awx/ui/src/screens/Credential/CredentialList/CredentialListItem.js @@ -18,7 +18,7 @@ function CredentialListItem({ detailUrl, isSelected, onSelect, - + onCopy, fetchCredentials, rowIndex, }) { @@ -28,11 +28,14 @@ function CredentialListItem({ const canEdit = credential.summary_fields.user_capabilities.edit; const copyCredential = useCallback(async () => { - await CredentialsAPI.copy(credential.id, { + const response = await CredentialsAPI.copy(credential.id, { name: `${credential.name} @ ${timeOfDay()}`, }); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchCredentials(); - }, [credential.id, credential.name, fetchCredentials]); + }, [credential.id, credential.name, fetchCredentials, onCopy]); const handleCopyStart = useCallback(() => { setIsDisabled(true); diff --git a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js 
b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js index 547dd28507..6153f3217c 100644 --- a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js +++ b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.js @@ -7,6 +7,7 @@ import { ExecutionEnvironmentsAPI } from 'api'; import { getQSConfig, parseQueryString } from 'util/qs'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import PaginatedTable, { HeaderRow, HeaderCell, @@ -29,6 +30,7 @@ const QS_CONFIG = getQSConfig('execution_environments', { function ExecutionEnvironmentList() { const location = useLocation(); const match = useRouteMatch(); + const { addToast, Toast, toastProps } = useToast(); const { error: contentError, @@ -94,6 +96,18 @@ function ExecutionEnvironmentList() { } ); + const handleCopy = useCallback( + (newId) => { + addToast({ + id: newId, + title: t`Execution environment copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleDelete = async () => { await deleteExecutionEnvironments(); clearSelected(); @@ -194,6 +208,7 @@ function ExecutionEnvironmentList() { executionEnvironment={executionEnvironment} detailUrl={`${match.url}/${executionEnvironment.id}/details`} onSelect={() => handleSelect(executionEnvironment)} + onCopy={handleCopy} isSelected={selected.some( (row) => row.id === executionEnvironment.id )} @@ -218,6 +233,7 @@ function ExecutionEnvironmentList() { {t`Failed to delete one or more execution environments`} + ); } diff --git a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js index 35280d0204..8281c55a68 100644 --- 
a/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js +++ b/awx/ui/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.js @@ -18,20 +18,28 @@ function ExecutionEnvironmentListItem({ detailUrl, isSelected, onSelect, + onCopy, rowIndex, fetchExecutionEnvironments, }) { const [isDisabled, setIsDisabled] = useState(false); const copyExecutionEnvironment = useCallback(async () => { - await ExecutionEnvironmentsAPI.copy(executionEnvironment.id, { - name: `${executionEnvironment.name} @ ${timeOfDay()}`, - }); + const response = await ExecutionEnvironmentsAPI.copy( + executionEnvironment.id, + { + name: `${executionEnvironment.name} @ ${timeOfDay()}`, + } + ); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchExecutionEnvironments(); }, [ executionEnvironment.id, executionEnvironment.name, fetchExecutionEnvironments, + onCopy, ]); const handleCopyStart = useCallback(() => { @@ -114,6 +122,7 @@ ExecutionEnvironmentListItem.prototype = { detailUrl: string.isRequired, isSelected: bool.isRequired, onSelect: func.isRequired, + onCopy: func.isRequired, }; export default ExecutionEnvironmentListItem; diff --git a/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js b/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js index b1c6bcd032..22108eb30d 100644 --- a/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js +++ b/awx/ui/src/screens/Inventory/InventoryList/InventoryList.js @@ -5,6 +5,7 @@ import { Card, PageSection, DropdownItem } from '@patternfly/react-core'; import { InventoriesAPI } from 'api'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import AlertModal from 'components/AlertModal'; import DatalistToolbar from 'components/DataListToolbar'; import ErrorDetail from 'components/ErrorDetail'; @@ -29,6 +30,7 @@ const QS_CONFIG = 
getQSConfig('inventory', { function InventoryList() { const location = useLocation(); const match = useRouteMatch(); + const { addToast, Toast, toastProps } = useToast(); const { result: { @@ -112,6 +114,18 @@ function InventoryList() { clearSelected(); }; + const handleCopy = useCallback( + (newInventoryId) => { + addToast({ + id: newInventoryId, + title: t`Inventory copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const hasContentLoading = isDeleteLoading || isLoading; const canAdd = actions && actions.POST; @@ -149,130 +163,134 @@ function InventoryList() { ); return ( - - - - {t`Name`} - {t`Status`} - {t`Type`} - {t`Organization`} - {t`Actions`} - - } - renderToolbar={(props) => ( - - } - warningMessage={ - - } - />, - ]} - /> - )} - renderRow={(inventory, index) => ( - { - if (!inventory.pending_deletion) { - handleSelect(inventory); + <> + + + + {t`Name`} + {t`Status`} + {t`Type`} + {t`Organization`} + {t`Actions`} + + } + renderToolbar={(props) => ( + + } + warningMessage={ + + } + />, + ]} + /> + )} + renderRow={(inventory, index) => ( + row.id === inventory.id)} - /> - )} - emptyStateControls={canAdd && addButton} - /> - - - {t`Failed to delete one or more inventories.`} - - - + onSelect={() => { + if (!inventory.pending_deletion) { + handleSelect(inventory); + } + }} + onCopy={handleCopy} + isSelected={selected.some((row) => row.id === inventory.id)} + /> + )} + emptyStateControls={canAdd && addButton} + /> + + + {t`Failed to delete one or more inventories.`} + + + + + ); } diff --git a/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js b/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js index 49a0456e8d..c692c32f51 100644 --- a/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js +++ b/awx/ui/src/screens/Inventory/InventoryList/InventoryListItem.js @@ -18,6 +18,7 @@ function InventoryListItem({ rowIndex, isSelected, onSelect, + onCopy, detailUrl, 
fetchInventories, }) { @@ -30,11 +31,14 @@ function InventoryListItem({ const [isCopying, setIsCopying] = useState(false); const copyInventory = useCallback(async () => { - await InventoriesAPI.copy(inventory.id, { + const response = await InventoriesAPI.copy(inventory.id, { name: `${inventory.name} @ ${timeOfDay()}`, }); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchInventories(); - }, [inventory.id, inventory.name, fetchInventories]); + }, [inventory.id, inventory.name, fetchInventories, onCopy]); const handleCopyStart = useCallback(() => { setIsCopying(true); diff --git a/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js b/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js index 81c3845e42..defa2ef920 100644 --- a/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js +++ b/awx/ui/src/screens/NotificationTemplate/NotificationTemplateList/NotificationTemplateList.js @@ -1,14 +1,8 @@ -import React, { useCallback, useEffect, useState } from 'react'; +import React, { useCallback, useEffect } from 'react'; import { useLocation, useRouteMatch } from 'react-router-dom'; import { t } from '@lingui/macro'; -import { - Alert, - AlertActionCloseButton, - AlertGroup, - Card, - PageSection, -} from '@patternfly/react-core'; +import { Card, PageSection } from '@patternfly/react-core'; import { NotificationTemplatesAPI } from 'api'; import PaginatedTable, { HeaderRow, @@ -22,6 +16,7 @@ import ErrorDetail from 'components/ErrorDetail'; import DataListToolbar from 'components/DataListToolbar'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; +import useToast, { AlertVariant } from 'hooks/useToast'; import { getQSConfig, parseQueryString } from 'util/qs'; import NotificationTemplateListItem from './NotificationTemplateListItem'; @@ -34,7 +29,8 @@ const QS_CONFIG = 
getQSConfig('notification-templates', { function NotificationTemplatesList() { const location = useLocation(); const match = useRouteMatch(); - const [testToasts, setTestToasts] = useState([]); + // const [testToasts, setTestToasts] = useState([]); + const { addToast, Toast, toastProps } = useToast(); const addUrl = `${match.url}/add`; @@ -107,18 +103,7 @@ function NotificationTemplatesList() { clearSelected(); }; - const addTestToast = useCallback((notification) => { - setTestToasts((oldToasts) => [...oldToasts, notification]); - }, []); - - const removeTestToast = (notificationId) => { - setTestToasts((oldToasts) => - oldToasts.filter((toast) => toast.id !== notificationId) - ); - }; - const canAdd = actions && actions.POST; - const alertGroupDataCy = 'notification-template-alerts'; return ( <> @@ -198,7 +183,35 @@ function NotificationTemplatesList() { } renderRow={(template, index) => ( { + if (notification.status === 'pending') { + return; + } + + let message; + if (notification.status === 'successful') { + message = t`Notification sent successfully`; + } + if (notification.status === 'failed') { + if (notification?.error === 'timed out') { + message = t`Notification timed out`; + } else { + message = notification.error; + } + } + + addToast({ + id: notification.id, + title: + notification.summary_fields.notification_template.name, + variant: + notification.status === 'failed' + ? 
AlertVariant.danger + : AlertVariant.success, + hasTimeout: notification.status !== 'failed', + message, + }); + }} key={template.id} fetchTemplates={fetchTemplates} template={template} @@ -223,39 +236,7 @@ function NotificationTemplatesList() { {t`Failed to delete one or more notification template.`} - - {testToasts - .filter((notification) => notification.status !== 'pending') - .map((notification) => ( - removeTestToast(notification.id)} - /> - } - onTimeout={() => removeTestToast(notification.id)} - timeout={notification.status !== 'failed'} - title={notification.summary_fields.notification_template.name} - variant={notification.status === 'failed' ? 'danger' : 'success'} - key={`notification-template-alert-${notification.id}`} - ouiaId={`notification-template-alert-${notification.id}`} - > - <> - {notification.status === 'successful' && ( -

{t`Notification sent successfully`}

- )} - {notification.status === 'failed' && - notification?.error === 'timed out' && ( -

{t`Notification timed out`}

- )} - {notification.status === 'failed' && - notification?.error !== 'timed out' && ( -

{notification.error}

- )} - -
- ))} -
+ ); } diff --git a/awx/ui/src/screens/Project/ProjectList/ProjectList.js b/awx/ui/src/screens/Project/ProjectList/ProjectList.js index e71571f2c6..6c3e829048 100644 --- a/awx/ui/src/screens/Project/ProjectList/ProjectList.js +++ b/awx/ui/src/screens/Project/ProjectList/ProjectList.js @@ -19,6 +19,7 @@ import PaginatedTable, { } from 'components/PaginatedTable'; import useSelected from 'hooks/useSelected'; import useExpanded from 'hooks/useExpanded'; +import useToast, { AlertVariant } from 'hooks/useToast'; import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails'; import { getQSConfig, parseQueryString } from 'util/qs'; import useWsProjects from './useWsProjects'; @@ -34,6 +35,7 @@ const QS_CONFIG = getQSConfig('project', { function ProjectList() { const location = useLocation(); const match = useRouteMatch(); + const { addToast, Toast, toastProps } = useToast(); const { request: fetchUpdatedProject, @@ -123,6 +125,18 @@ function ProjectList() { } ); + const handleCopy = useCallback( + (newId) => { + addToast({ + id: newId, + title: t`Project copied successfully`, + variant: AlertVariant.success, + hasTimeout: true, + }); + }, + [addToast] + ); + const handleProjectDelete = async () => { await deleteProjects(); setSelected([]); @@ -255,6 +269,7 @@ function ProjectList() { detailUrl={`${match.url}/${project.id}`} isSelected={selected.some((row) => row.id === project.id)} onSelect={() => handleSelect(project)} + onCopy={handleCopy} rowIndex={index} onRefreshRow={(projectId) => fetchUpdatedProject(projectId)} /> @@ -267,6 +282,7 @@ function ProjectList() { /> + {deletionError && ( { - await ProjectsAPI.copy(project.id, { + const response = await ProjectsAPI.copy(project.id, { name: `${project.name} @ ${timeOfDay()}`, }); + if (response.status === 201) { + onCopy(response.data.id); + } await fetchProjects(); - }, [project.id, project.name, fetchProjects]); + }, [project.id, project.name, fetchProjects, onCopy]); const generateLastJobTooltip 
= (job) => ( <> diff --git a/awx/ui/src/types.js b/awx/ui/src/types.js index f0f95b1aa6..55fa23a6c6 100644 --- a/awx/ui/src/types.js +++ b/awx/ui/src/types.js @@ -9,6 +9,7 @@ import { oneOf, oneOfType, } from 'prop-types'; +import { AlertVariant } from '@patternfly/react-core'; export const Role = shape({ descendent_roles: arrayOf(string), @@ -428,3 +429,11 @@ export const SearchableKeys = arrayOf( type: string.isRequired, }) ); + +export const Toast = shape({ + title: string.isRequired, + variant: oneOf(Object.values(AlertVariant)).isRequired, + id: oneOfType([string, number]).isRequired, + hasTimeout: bool, + message: string, +}); From 4040e09cb858a269dbd70b3e38e63fcd0746c505 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 1 Mar 2022 16:25:12 -0800 Subject: [PATCH 063/125] Remove setTimeout and old comment from MeshGraph.js. --- awx/ui/src/screens/TopologyView/MeshGraph.js | 30 +++++++++----------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 583000a36b..a88f5d6dc4 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -2,7 +2,6 @@ import React, { useEffect, useState } from 'react'; import { useHistory } from 'react-router-dom'; import styled from 'styled-components'; import debounce from 'util/debounce'; -// import { t } from '@lingui/macro'; import * as d3 from 'd3'; import Legend from './Legend'; import Tooltip from './Tooltip'; @@ -195,23 +194,20 @@ function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { svg.call(zoom); function highlightSiblings(n) { - setTimeout(() => { + svg + .select(`circle.id-${n.id}`) + .attr('fill', DEFAULT_NODE_HIGHLIGHT_COLOR); + const immediate = links.filter( + (l) => + n.hostname === l.source.hostname || n.hostname === l.target.hostname + ); + immediate.forEach((s) => { svg - .select(`circle.id-${n.id}`) - .attr('fill', 
DEFAULT_NODE_HIGHLIGHT_COLOR); - const immediate = links.filter( - (l) => - n.hostname === l.source.hostname || - n.hostname === l.target.hostname - ); - immediate.forEach((s) => { - svg - .selectAll(`.link-${s.index}`) - .transition() - .style('stroke', '#0066CC') - .style('stroke-width', '3px'); - }); - }, 0); + .selectAll(`.link-${s.index}`) + .transition() + .style('stroke', '#0066CC') + .style('stroke-width', '3px'); + }); } function deselectSiblings(n) { From de1df8bf283831845bb0e67c223edbb530d3f34f Mon Sep 17 00:00:00 2001 From: Seth Foster Date: Wed, 2 Mar 2022 14:42:47 -0500 Subject: [PATCH 064/125] load job meta vars after JT extra vars --- awx/main/tasks/jobs.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index 23bf8faa68..7d2057d62d 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -823,11 +823,12 @@ class RunJob(BaseTask): return job.playbook def build_extra_vars_file(self, job, private_data_dir): - # Define special extra_vars for AWX, combine with job.extra_vars. 
- extra_vars = job.awx_meta_vars() - + extra_vars = dict() + # load in JT extra vars if job.extra_vars_dict: extra_vars.update(json.loads(job.decrypted_extra_vars())) + # load in meta vars, overriding any variable set in JT extra vars + extra_vars.update(job.awx_meta_vars()) # By default, all extra vars disallow Jinja2 template usage for # security reasons; top level key-values defined in JT.extra_vars, however, @@ -1885,14 +1886,6 @@ class RunAdHocCommand(BaseTask): if ad_hoc_command.verbosity: args.append('-%s' % ('v' * min(5, ad_hoc_command.verbosity))) - extra_vars = ad_hoc_command.awx_meta_vars() - - if ad_hoc_command.extra_vars_dict: - redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict) - if removed_vars: - raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars))) - extra_vars.update(ad_hoc_command.extra_vars_dict) - if ad_hoc_command.limit: args.append(ad_hoc_command.limit) else: @@ -1901,13 +1894,13 @@ class RunAdHocCommand(BaseTask): return args def build_extra_vars_file(self, ad_hoc_command, private_data_dir): - extra_vars = ad_hoc_command.awx_meta_vars() - + extra_vars = dict() if ad_hoc_command.extra_vars_dict: redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict) if removed_vars: raise ValueError(_("{} are prohibited from use in ad hoc commands.").format(", ".join(removed_vars))) extra_vars.update(ad_hoc_command.extra_vars_dict) + extra_vars.update(ad_hoc_command.awx_meta_vars()) self._write_extra_vars_file(private_data_dir, extra_vars) def build_module_name(self, ad_hoc_command): From 128400bfb5b68fe16e6c88628617685725ef504d Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Thu, 3 Mar 2022 10:11:54 -0500 Subject: [PATCH 065/125] Add resolved_action to analytics event data (#11816) * Add resolved_action to analytics event data * Bump collector version --- awx/main/analytics/collectors.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 
deletions(-) diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index f8456ca2f1..fc8e95d5e3 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -337,6 +337,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create {tbl}.parent_uuid, {tbl}.event, task_action, + resolved_action, -- '-' operator listed here: -- https://www.postgresql.org/docs/12/functions-json.html -- note that operator is only supported by jsonb objects @@ -356,7 +357,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create x.duration AS duration, x.res->'warnings' AS warnings, x.res->'deprecations' AS deprecations - FROM {tbl}, jsonb_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "start" text, "end" text) + FROM {tbl}, jsonb_to_record({event_data}) AS x("res" json, "duration" text, "task_action" text, "resolved_action" text, "start" text, "end" text) WHERE ({tbl}.{where_column} > '{since.isoformat()}' AND {tbl}.{where_column} <= '{until.isoformat()}')) TO STDOUT WITH CSV HEADER''' return query @@ -366,12 +367,12 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create return _copy_table(table='events', query=query(f"replace({tbl}.event_data::text, '\\u0000', '')::jsonb"), path=full_path) -@register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) +@register('events_table', '1.4', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) def events_table_unpartitioned(since, full_path, until, **kwargs): return _events_table(since, full_path, until, '_unpartitioned_main_jobevent', 'created', **kwargs) -@register('events_table', '1.3', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) +@register('events_table', '1.4', format='csv', description=_('Automation task records'), expensive=four_hour_slicing) def 
events_table_partitioned_modified(since, full_path, until, **kwargs): return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs) From 17756f0e725fb3a87862ac8234a5974c67b0f6e2 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Thu, 3 Mar 2022 11:13:11 -0500 Subject: [PATCH 066/125] Add job execution environment image to analytics data (#11835) * Add job execution environment image to analytics data * Add EE image to UJT analytics data * Bump the unified job templates table --- awx/main/analytics/collectors.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index fc8e95d5e3..1270c87315 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -377,13 +377,14 @@ def events_table_partitioned_modified(since, full_path, until, **kwargs): return _events_table(since, full_path, until, 'main_jobevent', 'modified', project_job_created=True, **kwargs) -@register('unified_jobs_table', '1.2', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing) +@register('unified_jobs_table', '1.3', format='csv', description=_('Data on jobs run'), expensive=four_hour_slicing) def unified_jobs_table(since, full_path, until, **kwargs): unified_job_query = '''COPY (SELECT main_unifiedjob.id, main_unifiedjob.polymorphic_ctype_id, django_content_type.model, main_unifiedjob.organization_id, main_organization.name as organization_name, + main_executionenvironment.image as execution_environment_image, main_job.inventory_id, main_inventory.name as inventory_name, main_unifiedjob.created, @@ -408,6 +409,7 @@ def unified_jobs_table(since, full_path, until, **kwargs): LEFT JOIN main_job ON main_unifiedjob.id = main_job.unifiedjob_ptr_id LEFT JOIN main_inventory ON main_job.inventory_id = main_inventory.id LEFT JOIN main_organization ON main_organization.id = main_unifiedjob.organization_id + LEFT JOIN 
main_executionenvironment ON main_executionenvironment.id = main_unifiedjob.execution_environment_id WHERE ((main_unifiedjob.created > '{0}' AND main_unifiedjob.created <= '{1}') OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}')) AND main_unifiedjob.launch_type != 'sync' @@ -418,11 +420,12 @@ def unified_jobs_table(since, full_path, until, **kwargs): return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path) -@register('unified_job_template_table', '1.0', format='csv', description=_('Data on job templates')) +@register('unified_job_template_table', '1.1', format='csv', description=_('Data on job templates')) def unified_job_template_table(since, full_path, **kwargs): unified_job_template_query = '''COPY (SELECT main_unifiedjobtemplate.id, main_unifiedjobtemplate.polymorphic_ctype_id, django_content_type.model, + main_executionenvironment.image as execution_environment_image, main_unifiedjobtemplate.created, main_unifiedjobtemplate.modified, main_unifiedjobtemplate.created_by_id, @@ -435,7 +438,8 @@ def unified_job_template_table(since, full_path, **kwargs): main_unifiedjobtemplate.next_job_run, main_unifiedjobtemplate.next_schedule_id, main_unifiedjobtemplate.status - FROM main_unifiedjobtemplate, django_content_type + FROM main_unifiedjobtemplate + LEFT JOIN main_executionenvironment ON main_executionenvironment.id = main_unifiedjobtemplate.execution_environment_id, django_content_type WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER''' return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path) From f1efc578cb6f333b5c448a9e9dd39b51274ed2cd Mon Sep 17 00:00:00 2001 From: nixocio Date: Thu, 3 Mar 2022 15:38:12 -0500 Subject: [PATCH 067/125] Split UI test run Split UI test run See: https://github.com/ansible/awx/issues/10678 --- .github/workflows/ci.yml | 9 ++++++--- Makefile | 11 
++++++++++- awx/ui/package.json | 2 ++ 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 99df2f7d72..c9465e9c32 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,9 +33,12 @@ jobs: - name: ui-lint label: Run UI Linters command: make ui-lint - - name: ui-test - label: Run UI Tests - command: make ui-test + - name: ui-test-screens + label: Run UI Screens Tests + command: make ui-test-screens + - name: ui-test-general + label: Run UI General Tests + command: make ui-test-general steps: - uses: actions/checkout@v2 diff --git a/Makefile b/Makefile index 839851f9e2..968db81d57 100644 --- a/Makefile +++ b/Makefile @@ -402,9 +402,18 @@ ui-lint: ui-test: $(NPM_BIN) --prefix awx/ui install - $(NPM_BIN) run --prefix awx/ui test + $(NPM_BIN) run --prefix awx/ui test +ui-test-screens: + $(NPM_BIN) --prefix awx/ui install + $(NPM_BIN) run --prefix awx/ui pretest + $(NPM_BIN) run --prefix awx/ui test-screens --runInBand +ui-test-general: + $(NPM_BIN) --prefix awx/ui install + $(NPM_BIN) run --prefix awx/ui pretest + $(NPM_BIN) run --prefix awx/ui/ test-general --runInBand + # Build a pip-installable package into dist/ with a timestamped version number. 
dev_build: $(PYTHON) setup.py dev_build diff --git a/awx/ui/package.json b/awx/ui/package.json index 040d6c14d1..adaacbb5e8 100644 --- a/awx/ui/package.json +++ b/awx/ui/package.json @@ -75,6 +75,8 @@ "start-instrumented": "ESLINT_NO_DEV_ERRORS=true DEBUG=instrument-cra PORT=3001 HTTPS=true DANGEROUSLY_DISABLE_HOST_CHECK=true react-scripts -r @cypress/instrument-cra start", "build": "INLINE_RUNTIME_CHUNK=false react-scripts build", "test": "TZ='UTC' react-scripts test --watchAll=false", + "test-screens": "TZ='UTC' react-scripts test screens --watchAll=false", + "test-general": "TZ='UTC' react-scripts test --testPathIgnorePatterns='/src/screens/' --watchAll=false", "test-watch": "TZ='UTC' react-scripts test", "eject": "react-scripts eject", "lint": "eslint --ext .js --ext .jsx .", From a155f5561fc4ed07c3f1d883192f2ceea822a598 Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Thu, 3 Mar 2022 13:00:42 -0500 Subject: [PATCH 068/125] Remove user_only roles from User and Team permission modal --- .../UserAndTeamAccessAdd.js | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js b/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js index 7396daeff4..28d71d7424 100644 --- a/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js +++ b/awx/ui/src/components/UserAndTeamAccessAdd/UserAndTeamAccessAdd.js @@ -1,6 +1,6 @@ import React, { useState, useCallback } from 'react'; import { t } from '@lingui/macro'; -import { useParams } from 'react-router-dom'; +import { useParams, useRouteMatch } from 'react-router-dom'; import styled from 'styled-components'; import useRequest from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; @@ -27,6 +27,11 @@ function UserAndTeamAccessAdd({ const [selectedResourceType, setSelectedResourceType] = useState(null); const [stepIdReached, setStepIdReached] = useState(1); const { id: userId } = useParams(); + 
const teamsRouteMatch = useRouteMatch({ + path: '/teams/:id/roles', + exact: true, + }); + const { selected: resourcesSelected, handleSelect: handleResourceSelect } = useSelected([]); @@ -54,6 +59,19 @@ function UserAndTeamAccessAdd({ {} ); + // Object roles can be user only, so we remove them when + // showing role choices for team access + const selectableRoles = { + ...resourcesSelected[0]?.summary_fields?.object_roles, + }; + if (teamsRouteMatch && resourcesSelected[0]?.type === 'organization') { + Object.keys(selectableRoles).forEach((key) => { + if (selectableRoles[key].user_only) { + delete selectableRoles[key]; + } + }); + } + const steps = [ { id: 1, @@ -101,7 +119,7 @@ function UserAndTeamAccessAdd({ component: resourcesSelected?.length > 0 && ( Date: Fri, 4 Mar 2022 14:03:17 -0500 Subject: [PATCH 069/125] Api issue float (#11757) * Fix integer/float errors in survey * Add SURVEY_TYPE_MAPPING to constants Add SURVEY_TYPE_MAPPING to constants, and replace usage in a couple of files. 
Co-authored-by: Alexander Komarov --- awx/api/views/__init__.py | 10 +++--- awx/main/constants.py | 2 ++ .../tests/unit/models/test_survey_models.py | 32 +++++++++++++++++++ 3 files changed, 38 insertions(+), 6 deletions(-) diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 51ab4c9dd2..163fa4e727 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -113,7 +113,7 @@ from awx.api.permissions import ( from awx.api import renderers from awx.api import serializers from awx.api.metadata import RoleMetadata -from awx.main.constants import ACTIVE_STATES +from awx.main.constants import ACTIVE_STATES, SURVEY_TYPE_MAPPING from awx.main.scheduler.dag_workflow import WorkflowDAG from awx.api.views.mixin import ( ControlledByScmMixin, @@ -2468,8 +2468,6 @@ class JobTemplateSurveySpec(GenericAPIView): obj_permission_type = 'admin' serializer_class = serializers.EmptySerializer - ALLOWED_TYPES = {'text': str, 'textarea': str, 'password': str, 'multiplechoice': str, 'multiselect': str, 'integer': int, 'float': float} - def get(self, request, *args, **kwargs): obj = self.get_object() return Response(obj.display_survey_spec()) @@ -2540,17 +2538,17 @@ class JobTemplateSurveySpec(GenericAPIView): # Type-specific validation # validate question type <-> default type qtype = survey_item["type"] - if qtype not in JobTemplateSurveySpec.ALLOWED_TYPES: + if qtype not in SURVEY_TYPE_MAPPING: return Response( dict( error=_("'{survey_item[type]}' in survey question {idx} is not one of '{allowed_types}' allowed question types.").format( - allowed_types=', '.join(JobTemplateSurveySpec.ALLOWED_TYPES.keys()), **context + allowed_types=', '.join(SURVEY_TYPE_MAPPING.keys()), **context ) ), status=status.HTTP_400_BAD_REQUEST, ) if 'default' in survey_item and survey_item['default'] != '': - if not isinstance(survey_item['default'], JobTemplateSurveySpec.ALLOWED_TYPES[qtype]): + if not isinstance(survey_item['default'], SURVEY_TYPE_MAPPING[qtype]): type_label = 
'string' if qtype in ['integer', 'float']: type_label = qtype diff --git a/awx/main/constants.py b/awx/main/constants.py index 9074d9bd7f..d87bf82983 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -93,3 +93,5 @@ JOB_FOLDER_PREFIX = 'awx_%s_' # /HOST-DIR:/CONTAINER-DIR:OPTIONS CONTAINER_VOLUMES_MOUNT_TYPES = ['z', 'O', 'ro', 'rw'] MAX_ISOLATED_PATH_COLON_DELIMITER = 2 + +SURVEY_TYPE_MAPPING = {'text': str, 'textarea': str, 'password': str, 'multiplechoice': str, 'multiselect': str, 'integer': int, 'float': (float, int)} diff --git a/awx/main/tests/unit/models/test_survey_models.py b/awx/main/tests/unit/models/test_survey_models.py index c3c9a8723f..9ec5673cd8 100644 --- a/awx/main/tests/unit/models/test_survey_models.py +++ b/awx/main/tests/unit/models/test_survey_models.py @@ -59,6 +59,38 @@ class SurveyVariableValidation: assert accepted == {} assert str(errors[0]) == "Value 5 for 'a' expected to be a string." + def test_job_template_survey_default_variable_validation(self, job_template_factory): + objects = job_template_factory( + "survey_variable_validation", + organization="org1", + inventory="inventory1", + credential="cred1", + persisted=False, + ) + obj = objects.job_template + obj.survey_spec = { + "description": "", + "spec": [ + { + "required": True, + "min": 0, + "default": "2", + "max": 1024, + "question_description": "", + "choices": "", + "variable": "a", + "question_name": "float_number", + "type": "float", + } + ], + "name": "", + } + + obj.survey_enabled = True + accepted, _, errors = obj.accept_or_ignore_variables({"a": 2}) + assert accepted == {{"a": 2.0}} + assert not errors + @pytest.fixture def job(mocker): From 264c508c805267b420a56a8429d1293ac4355ef0 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Tue, 1 Mar 2022 20:08:54 -0500 Subject: [PATCH 070/125] Move model and settings operations out of threaded code This is to avoid references to settings in threads, this is known to create problems when caches expire this leads 
to KeyError in environments with heavy load --- awx/main/tasks/receptor.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index d2809e27ac..5d58217b6f 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -286,19 +286,18 @@ class AWXReceptorJob: # reading. sockin, sockout = socket.socketpair() + # Prepare the submit_work kwargs before creating threads, because references to settings are not thread-safe + work_submit_kw = dict(worktype=self.work_type, params=self.receptor_params, signwork=self.sign_work) + if self.work_type == 'ansible-runner': + work_submit_kw['node'] = self.task.instance.execution_node + use_stream_tls = get_conn_type(work_submit_kw['node'], receptor_ctl).name == "STREAMTLS" + work_submit_kw['tlsclient'] = get_tls_client(use_stream_tls) + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: transmitter_future = executor.submit(self.transmit, sockin) - _kw = {} - if self.work_type == 'ansible-runner': - _kw['node'] = self.task.instance.execution_node - use_stream_tls = get_conn_type(_kw['node'], receptor_ctl).name == "STREAMTLS" - _kw['tlsclient'] = get_tls_client(use_stream_tls) - # submit our work, passing in the right side of our socketpair for reading. - result = receptor_ctl.submit_work( - worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params, signwork=self.sign_work, **_kw - ) + result = receptor_ctl.submit_work(payload=sockout.makefile('rb'), **work_submit_kw) sockin.close() sockout.close() From 69ea456cf6bde97383d943583eb4bfcb73392c18 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 3 Mar 2022 16:02:53 -0500 Subject: [PATCH 071/125] Expand out the early membership role assignment The Member role can derive from e.g. the Org Admin role, so basically all organization and team roles should be assigned first, so that RBAC conditions are met when assigning later roles. 
--- awxkit/awxkit/api/pages/api.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 1feadf444d..7ce4b7a042 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -1,3 +1,4 @@ +from collections import defaultdict import itertools import logging @@ -204,7 +205,7 @@ class ApiV2(base.Base): # Import methods - def _dependent_resources(self, data): + def _dependent_resources(self): page_resource = {getattr(self, resource)._create().__item_class__: resource for resource in self.json} data_pages = [getattr(self, resource)._create().__item_class__ for resource in EXPORTABLE_RESOURCES] @@ -256,7 +257,12 @@ class ApiV2(base.Base): if not S: continue if name == 'roles': - self._roles.append((_page, S)) + indexed_roles = defaultdict(list) + for role in S: + if 'content_object' not in role: + continue + indexed_roles[role['content_object']['type']].append(role) + self._roles.append((_page, indexed_roles)) else: self._related.append((_page, name, S)) @@ -278,17 +284,17 @@ class ApiV2(base.Base): log.debug("post_data: %r", {'id': role_page['id']}) def _assign_membership(self): - for _page, roles in self._roles: + for _page, indexed_roles in self._roles: role_endpoint = _page.json['related']['roles'] - for role in roles: - if role['name'] == 'Member': + for content_type in ('organization', 'team'): + for role in indexed_roles.get(content_type, []): self._assign_role(role_endpoint, role) def _assign_roles(self): - for _page, roles in self._roles: + for _page, indexed_roles in self._roles: role_endpoint = _page.json['related']['roles'] - for role in roles: - if role['name'] != 'Member': + for content_type in set(indexed_roles) - {'organization', 'team'}: + for role in indexed_roles.get(content_type, []): self._assign_role(role_endpoint, role) def _assign_related(self): @@ -330,7 +336,7 @@ class ApiV2(base.Base): changed = False - for resource 
in self._dependent_resources(data): + for resource in self._dependent_resources(): endpoint = getattr(self, resource) # Load up existing objects, so that we can try to update or link to them self._cache.get_page(endpoint) From a5b888c19393ba136a925d1e4ef9e170b037fcc3 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Tue, 1 Mar 2022 08:55:25 -0500 Subject: [PATCH 072/125] Add default container mounts to AWX_ISOLATION_SHOW_PATHS --- awx/main/tests/functional/api/test_settings.py | 5 ++++- awx/settings/defaults.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py index a1ae7398a5..0e9bf08297 100644 --- a/awx/main/tests/functional/api/test_settings.py +++ b/awx/main/tests/functional/api/test_settings.py @@ -38,7 +38,10 @@ def test_jobs_settings(get, put, patch, delete, admin): data.pop('AWX_ANSIBLE_CALLBACK_PLUGINS') put(url, user=admin, data=data, expect=200) response = get(url, user=admin, expect=200) - assert response.data['AWX_ISOLATION_SHOW_PATHS'] == [] + assert response.data['AWX_ISOLATION_SHOW_PATHS'] == [ + '/etc/pki/ca-trust:/etc/pki/ca-trust:O', + '/usr/share/pki:/usr/share/pki:O', + ] assert response.data['AWX_ANSIBLE_CALLBACK_PLUGINS'] == [] diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index bc3c2549c3..ef3999a0fd 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -589,7 +589,10 @@ GALAXY_IGNORE_CERTS = False # Additional paths to show for jobs using process isolation. # Note: This setting may be overridden by database settings. 
-AWX_ISOLATION_SHOW_PATHS = [] +AWX_ISOLATION_SHOW_PATHS = [ + '/etc/pki/ca-trust:/etc/pki/ca-trust:O', + '/usr/share/pki:/usr/share/pki:O', +] # The directory in which the service will create new temporary directories for job # execution and isolation (such as credential files and custom From a65948de6970cfb386ed0558bc0ac122cddfdf4c Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Mon, 28 Feb 2022 14:45:52 -0500 Subject: [PATCH 073/125] Add unique row id to subscription modal list items --- .../SubscriptionEdit/SubscriptionModal.js | 32 +++++++++---------- .../SubscriptionModal.test.js | 8 ++--- .../SubscriptionEdit/SubscriptionStep.js | 2 +- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js index 69ef9dc029..99a9c993af 100644 --- a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js +++ b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.js @@ -47,6 +47,15 @@ function SubscriptionModal({ subscriptionCreds.username, subscriptionCreds.password ); + + // Ensure unique ids for each subscription + // because it is possible to have multiple + // subscriptions with the same pool_id + let repeatId = 1; + data.forEach((i) => { + i.id = repeatId++; + }); + return data; }, []), // eslint-disable-line react-hooks/exhaustive-deps [] @@ -64,17 +73,9 @@ function SubscriptionModal({ fetchSubscriptions(); }, [fetchSubscriptions]); - const handleSelect = (item) => { - if (selected.some((s) => s.pool_id === item.pool_id)) { - setSelected(selected.filter((s) => s.pool_id !== item.pool_id)); - } else { - setSelected(selected.concat(item)); - } - }; - useEffect(() => { - if (selectedSubscription?.pool_id) { - handleSelect({ pool_id: selectedSubscription.pool_id }); + if (selectedSubscription?.id) { + setSelected([selectedSubscription]); } }, []); // 
eslint-disable-line react-hooks/exhaustive-deps @@ -150,19 +151,18 @@ function SubscriptionModal({
{subscriptions.map((subscription) => ( diff --git a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js index 6667a776ff..4c8fa843f2 100644 --- a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js +++ b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionModal.test.js @@ -125,14 +125,14 @@ describe('', () => { password: '$encrypted', }} selectedSubscription={{ - pool_id: 8, + id: 2, }} /> ); await waitForElement(wrapper, 'table'); - expect(wrapper.find('tr[id=7] input').prop('checked')).toBe(false); - expect(wrapper.find('tr[id=8] input').prop('checked')).toBe(true); - expect(wrapper.find('tr[id=9] input').prop('checked')).toBe(false); + expect(wrapper.find('tr[id="row-1"] input').prop('checked')).toBe(false); + expect(wrapper.find('tr[id="row-2"] input').prop('checked')).toBe(true); + expect(wrapper.find('tr[id="row-3"] input').prop('checked')).toBe(false); }); }); diff --git a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js index 2d9aebc631..333a7939fd 100644 --- a/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js +++ b/awx/ui/src/screens/Setting/Subscription/SubscriptionEdit/SubscriptionStep.js @@ -227,7 +227,7 @@ function SubscriptionStep() { username: username.value, password: password.value, }} - selectedSubscripion={subscription?.value} + selectedSubscription={subscription?.value} onClose={closeModal} onConfirm={(value) => subscriptionHelpers.setValue(value)} /> From 6e5cde0b0525e83666665e41b66800fbb841ea0b Mon Sep 17 00:00:00 2001 From: Yanis Guenane Date: Mon, 7 Mar 2022 14:55:25 +0100 Subject: [PATCH 074/125] requirements: Add packaging deps following runner upgrade --- docs/licenses/packaging.txt | 3 +++ requirements/requirements.txt | 6 +++++- 2 files 
changed, 8 insertions(+), 1 deletion(-) create mode 100644 docs/licenses/packaging.txt diff --git a/docs/licenses/packaging.txt b/docs/licenses/packaging.txt new file mode 100644 index 0000000000..6f62d44e4e --- /dev/null +++ b/docs/licenses/packaging.txt @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 239169bf27..f05ebe3b96 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -224,6 +224,8 @@ oauthlib==3.1.0 # social-auth-core openshift==0.11.0 # via -r /awx_devel/requirements/requirements.in +packaging==21.3 + # via ansible-runner pbr==5.6.0 # via -r /awx_devel/requirements/requirements.in pexpect==4.7.0 @@ -265,7 +267,9 @@ pyjwt==1.7.1 pyopenssl==19.1.0 # via twisted pyparsing==2.4.6 - # via -r /awx_devel/requirements/requirements.in + # via + # -r /awx_devel/requirements/requirements.in + # packaging pyrad==2.3 # via django-radius pyrsistent==0.15.7 From 9f021b780c48f1bae1a4e4df98b8a0591977d0fb Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Mon, 7 Mar 2022 12:49:11 -0500 Subject: [PATCH 075/125] Move default show paths to production.py This breaks the dev env --- awx/main/tests/functional/api/test_settings.py | 5 +---- awx/settings/defaults.py | 5 +---- awx/settings/production.py | 5 +++++ 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/awx/main/tests/functional/api/test_settings.py b/awx/main/tests/functional/api/test_settings.py index 0e9bf08297..a1ae7398a5 100644 --- a/awx/main/tests/functional/api/test_settings.py +++ b/awx/main/tests/functional/api/test_settings.py @@ -38,10 +38,7 @@ def test_jobs_settings(get, put, patch, delete, admin): data.pop('AWX_ANSIBLE_CALLBACK_PLUGINS') put(url, user=admin, data=data, expect=200) response = get(url, user=admin, expect=200) 
- assert response.data['AWX_ISOLATION_SHOW_PATHS'] == [ - '/etc/pki/ca-trust:/etc/pki/ca-trust:O', - '/usr/share/pki:/usr/share/pki:O', - ] + assert response.data['AWX_ISOLATION_SHOW_PATHS'] == [] assert response.data['AWX_ANSIBLE_CALLBACK_PLUGINS'] == [] diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index ef3999a0fd..bc3c2549c3 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -589,10 +589,7 @@ GALAXY_IGNORE_CERTS = False # Additional paths to show for jobs using process isolation. # Note: This setting may be overridden by database settings. -AWX_ISOLATION_SHOW_PATHS = [ - '/etc/pki/ca-trust:/etc/pki/ca-trust:O', - '/usr/share/pki:/usr/share/pki:O', -] +AWX_ISOLATION_SHOW_PATHS = [] # The directory in which the service will create new temporary directories for job # execution and isolation (such as credential files and custom diff --git a/awx/settings/production.py b/awx/settings/production.py index 75b70f7bfc..9f480a188a 100644 --- a/awx/settings/production.py +++ b/awx/settings/production.py @@ -91,3 +91,8 @@ except IOError: DATABASES.setdefault('default', dict()).setdefault('OPTIONS', dict()).setdefault( 'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63] ) # noqa + +AWX_ISOLATION_SHOW_PATHS = [ + '/etc/pki/ca-trust:/etc/pki/ca-trust:O', + '/usr/share/pki:/usr/share/pki:O', +] From df61d1a59ce6c9142e8f0848a24b6f38e7b0be94 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 31 Jan 2022 11:40:00 -0500 Subject: [PATCH 076/125] Upgrade to Django 3.0 - upgrades - Django 3.0.14 - django-jsonfield 1.4.1 (from 1.2.0) - django-oauth-toolkit 1.4.1 (from 1.1.3) - Stopping here because later versions have changes to the underlying model to support OpenID Connect. Presumably this can be dealt with via a migration in our project. 
- django-guid 2.2.1 (from 2.2.0) - django-debug-toolbar 3.2.4 (from 1.11.1) - python3-saml 1.13.0 (from 1.9.0) - xmlsec 1.3.12 (from 1.3.3) - Remove our project's use of django.utils.six in favor of directly using six, in awx.sso.fields. - Temporarily monkey patch six back in as django.utils.six, since django-jsonfield makes use of that import, and is no longer being updated. Hopefully we can do away with this dependency with the new generalized JSONField brought in with Django 3.1. - Force a json decoder to be used with all instances of JSONField brought in by django-jsonfield. This deals with the 'cast to text' problem noted previously in our UPGRADE_BLOCKERS. - Remove the validate_uris validator from the OAuth2Application in migration 0025, per the UPGRADE_BLOCKERS, and remove that note. - Update the TEMPLATES setting to satisfy Django Debug Toolbar. It requires at least one entry that has APP_DIRS=True, and as near as I can tell our custom OPTIONS.loaders setting was effectively doing the same thing as Django's own machinery if this setting is set. 
--- awx/__init__.py | 4 +++ awx/main/fields.py | 4 +++ ...330_add_oauth_activity_stream_registrar.py | 2 +- awx/settings/defaults.py | 7 ++--- awx/settings/development.py | 4 --- awx/sso/fields.py | 3 +- docs/licenses/pkgconfig.txt | 19 ------------ docs/licenses/ruamel.yaml.clib.txt | 21 ------------- requirements/README.md | 15 ++++----- requirements/requirements.in | 10 +++--- requirements/requirements.txt | 31 +++++++++---------- requirements/requirements_dev.txt | 2 +- requirements/updater.sh | 3 +- 13 files changed, 42 insertions(+), 83 deletions(-) delete mode 100644 docs/licenses/pkgconfig.txt delete mode 100644 docs/licenses/ruamel.yaml.clib.txt diff --git a/awx/__init__.py b/awx/__init__.py index fa3e164092..31806538c2 100644 --- a/awx/__init__.py +++ b/awx/__init__.py @@ -6,6 +6,8 @@ import os import sys import warnings +import six + from pkg_resources import get_distribution __version__ = get_distribution('awx').version @@ -35,7 +37,9 @@ else: from django.db.models import indexes from django.db.backends.utils import names_digest from django.db import connection + from django import utils + utils.six = six # FIXME: monkey patch to get us through for now if HAS_DJANGO is True: diff --git a/awx/main/fields.py b/awx/main/fields.py index 95ebfbca73..71bf0c612f 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -72,6 +72,10 @@ Draft4Validator.VALIDATORS['enum'] = __enum_validate__ class JSONField(upstream_JSONField): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.decoder_kwargs = {'cls': json.JSONDecoder} # FIXME + def db_type(self, connection): return 'text' diff --git a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py index cc1d1bfeba..e26571f1b9 100644 --- a/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py +++ b/awx/main/migrations/0025_v330_add_oauth_activity_stream_registrar.py @@ -29,7 +29,7 @@ class 
Migration(migrations.Migration): ('client_id', models.CharField(db_index=True, default=oauth2_provider.generators.generate_client_id, max_length=100, unique=True)), ( 'redirect_uris', - models.TextField(blank=True, help_text='Allowed URIs list, space separated', validators=[oauth2_provider.validators.validate_uris]), + models.TextField(blank=True, help_text='Allowed URIs list, space separated'), ), ('client_type', models.CharField(choices=[('confidential', 'Confidential'), ('public', 'Public')], max_length=32)), ( diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index bc3c2549c3..fcd422a3dd 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -273,8 +273,8 @@ TEMPLATES = [ { 'NAME': 'default', 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'APP_DIRS': True, 'OPTIONS': { - 'debug': DEBUG, 'context_processors': [ # NOQA 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', @@ -289,13 +289,10 @@ TEMPLATES = [ 'social_django.context_processors.backends', 'social_django.context_processors.login_redirect', ], - 'loaders': [ - ('django.template.loaders.cached.Loader', ('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader')) - ], 'builtins': ['awx.main.templatetags.swagger'], }, 'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'ui', 'build'), os.path.join(BASE_DIR, 'ui', 'public')], - } + }, ] ROOT_URLCONF = 'awx.urls' diff --git a/awx/settings/development.py b/awx/settings/development.py index 70b64643dd..be1c115606 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -45,10 +45,6 @@ SESSION_COOKIE_SECURE = False # Disallow sending csrf cookies over insecure connections CSRF_COOKIE_SECURE = False -# Override django.template.loaders.cached.Loader in defaults.py -template = next((tpl_backend for tpl_backend in TEMPLATES if tpl_backend['NAME'] == 'default'), None) # noqa -template['OPTIONS']['loaders'] = 
('django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader') - # Disable Pendo on the UI for development/test. # Note: This setting may be overridden by database settings. PENDO_TRACKING_STATE = "off" diff --git a/awx/sso/fields.py b/awx/sso/fields.py index e2d46d9362..0e4af005c9 100644 --- a/awx/sso/fields.py +++ b/awx/sso/fields.py @@ -4,12 +4,13 @@ import inspect import json import re +import six + # Python LDAP import ldap import awx # Django -from django.utils import six from django.utils.translation import ugettext_lazy as _ # Django Auth LDAP diff --git a/docs/licenses/pkgconfig.txt b/docs/licenses/pkgconfig.txt deleted file mode 100644 index 716f12754d..0000000000 --- a/docs/licenses/pkgconfig.txt +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2013 Matthias Vogelgesang - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/docs/licenses/ruamel.yaml.clib.txt b/docs/licenses/ruamel.yaml.clib.txt deleted file mode 100644 index 1c3e20a20e..0000000000 --- a/docs/licenses/ruamel.yaml.clib.txt +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2019 Anthon van der Neut, Ruamel bvba - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/requirements/README.md b/requirements/README.md index 69ab2d4bcf..5dc2638c3c 100644 --- a/requirements/README.md +++ b/requirements/README.md @@ -58,7 +58,7 @@ Make sure to delete the old tarball if it is an upgrade. Anything pinned in `*.in` files involves additional manual work in order to upgrade. Some information related to that work is outlined here. -### django +### Django For any upgrade of Django, it must be confirmed that we don't regress on FIPS support before merging. @@ -90,13 +90,10 @@ that we have the latest version ### django-oauth-toolkit -Version 1.2.0 of this project has a bug that error when revoking tokens. 
-This is fixed in the master branch but is not yet released. - -When upgrading past 1.2.0 in the future, the `0025` migration needs to be -edited, just like the old migration was edited in the project: -https://github.com/jazzband/django-oauth-toolkit/commit/96538876d0d7ea0319ba5286f9bde842a906e1c5 -The field can simply have the validator method `validate_uris` removed. +Versions later than 1.4.1 throw an error about id_token_id, due to the +OpenID Connect work that was done in +https://github.com/jazzband/django-oauth-toolkit/pull/915. This may +be fixable by creating a migration on our end? ### azure-keyvault @@ -117,7 +114,7 @@ https://github.com/adamchainz/django-jsonfield/pull/14 This breaks a very large amount of AWX code that assumes these fields are returned as dicts. Upgrading this library will require a refactor -to accomidate this change. +to accommodate this change. ### pip and setuptools diff --git a/requirements/requirements.in b/requirements/requirements.in index 94ece3085d..d60639aa1b 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -9,14 +9,14 @@ cryptography>=3.2 Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep daphne distro -django==2.2.24 # see UPGRADE BLOCKERs +django==3.0.14 # see UPGRADE BLOCKERs django-auth-ldap django-cors-headers>=3.5.0 django-crum django-extensions>=2.2.9 # https://github.com/ansible/awx/pull/6441 -django-guid==2.2.0 # pinned to match Django 2.2 -django-jsonfield==1.2.0 # see UPGRADE BLOCKERs -django-oauth-toolkit==1.1.3 # see UPGRADE BLOCKERs +django-guid==2.2.1 # see https://pypi.org/project/django-guid/ for supported versions +django-jsonfield==1.4.1 +django-oauth-toolkit==1.4.1 django-polymorphic django-pglocks django-qsstats-magic @@ -40,7 +40,7 @@ psycopg2 psutil pygerduty pyparsing -python3-saml +python3-saml==1.13.0 python-dsv-sdk python-tss-sdk==1.0.0 python-ldap>=3.3.1 # https://github.com/python-ldap/python-ldap/issues/270 diff --git 
a/requirements/requirements.txt b/requirements/requirements.txt index f05ebe3b96..2e16a46797 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -14,6 +14,7 @@ asgiref==3.2.5 # channels # channels-redis # daphne + # django async-timeout==3.0.1 # via # aiohttp @@ -80,13 +81,12 @@ dataclasses==0.6 defusedxml==0.6.0 # via # python3-openid - # python3-saml # social-auth-core dictdiffer==0.8.1 # via openshift distro==1.5.0 # via -r /awx_devel/requirements/requirements.in -django==2.2.24 +django==3.0.14 # via # -r /awx_devel/requirements/requirements.in # channels @@ -107,11 +107,11 @@ django-crum==0.7.5 # via -r /awx_devel/requirements/requirements.in django-extensions==2.2.9 # via -r /awx_devel/requirements/requirements.in -django-guid==2.2.0 +django-guid==2.2.1 # via -r /awx_devel/requirements/requirements.in -django-jsonfield==1.2.0 +django-jsonfield==1.4.1 # via -r /awx_devel/requirements/requirements.in -django-oauth-toolkit==1.1.3 +django-oauth-toolkit==1.4.1 # via -r /awx_devel/requirements/requirements.in django-pglocks==1.0.4 # via -r /awx_devel/requirements/requirements.in @@ -159,7 +159,7 @@ incremental==17.5.0 # via twisted irc==18.0.0 # via -r /awx_devel/requirements/requirements.in -isodate==0.6.0 +isodate==0.6.1 # via # msrest # python3-saml @@ -180,7 +180,7 @@ jaraco.text==3.2.0 # via # irc # jaraco.collections -jinja2==2.11.2 +jinja2==3.0.3 # via # -r /awx_devel/requirements/requirements.in # openshift @@ -192,11 +192,13 @@ kubernetes==11.0.0 # via openshift lockfile==0.12.2 # via python-daemon -lxml==4.6.3 - # via xmlsec +lxml==4.7.0 + # via + # python3-saml + # xmlsec markdown==3.2.1 # via -r /awx_devel/requirements/requirements.in -markupsafe==1.1.1 +markupsafe==2.0.1 # via jinja2 more-itertools==8.2.0 # via @@ -232,8 +234,6 @@ pexpect==4.7.0 # via # -r /awx_devel/requirements/requirements.in # ansible-runner -pkgconfig==1.5.1 - # via xmlsec prometheus-client==0.7.1 # via -r /awx_devel/requirements/requirements.in 
psutil==5.8.0 @@ -293,7 +293,7 @@ python-tss-sdk==1.0.0 # via -r /awx_devel/requirements/requirements.in python3-openid==3.1.0 # via social-auth-core -python3-saml==1.9.0 +python3-saml==1.13.0 # via -r /awx_devel/requirements/requirements.in pytz==2019.3 # via @@ -336,8 +336,6 @@ rsa==4.7.2 # via google-auth ruamel.yaml==0.16.10 # via openshift -ruamel.yaml.clib==0.2.0 - # via ruamel.yaml schedule==0.6.0 # via -r /awx_devel/requirements/requirements.in service-identity==18.1.0 @@ -348,6 +346,7 @@ six==1.14.0 # automat # cryptography # django-extensions + # django-jsonfield # django-pglocks # google-auth # isodate @@ -407,7 +406,7 @@ websocket-client==0.57.0 # via kubernetes wheel==0.36.2 # via -r /awx_devel/requirements/requirements.in -xmlsec==1.3.3 +xmlsec==1.3.12 # via python3-saml yarl==1.4.2 # via aiohttp diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index 582eac7fb9..bea1e88dc2 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -1,4 +1,4 @@ -django-debug-toolbar==1.11.1 +django-debug-toolbar==3.2.4 django-rest-swagger # pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed ipython==7.21.0 diff --git a/requirements/updater.sh b/requirements/updater.sh index fa9ae7ddd2..01f6000d2c 100755 --- a/requirements/updater.sh +++ b/requirements/updater.sh @@ -18,7 +18,8 @@ generate_requirements() { # shellcheck disable=SC1090 source ${venv}/bin/activate - ${venv}/bin/python3 -m pip install -U pip pip-tools + # FIXME: https://github.com/jazzband/pip-tools/issues/1558 + ${venv}/bin/python3 -m pip install -U 'pip<22.0' pip-tools ${pip_compile} "${requirements_in}" "${requirements_git}" --output-file requirements.txt # consider the git requirements for purposes of resolving deps From efff85bc1f1e05d458085b59bcef0560c97a838a Mon Sep 17 00:00:00 2001 From: Bill Nottingham Date: Tue, 24 Aug 2021 17:09:50 -0400 Subject: [PATCH 077/125] Fix wsbroadcast for django 3.0+ async 
From https://docs.djangoproject.com/en/3.0/topics/async/#asgiref.sync.sync_to_async --- awx/main/wsbroadcast.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index ec2fae5e89..cc806e7f26 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -4,6 +4,7 @@ import asyncio import aiohttp from aiohttp import client_exceptions +from asgiref.sync import sync_to_async from channels.layers import get_channel_layer @@ -30,6 +31,7 @@ def unwrap_broadcast_msg(payload: dict): return (payload['group'], payload['message']) +@sync_to_async def get_broadcast_hosts(): Instance = apps.get_model('main', 'Instance') instances = ( @@ -170,7 +172,7 @@ class BroadcastWebsocketManager(object): async def run_per_host_websocket(self): while True: - known_hosts = get_broadcast_hosts() + known_hosts = await get_broadcast_hosts() future_remote_hosts = known_hosts.keys() current_remote_hosts = self.broadcast_tasks.keys() deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) From a3a216f91f1158fd54c001c34cbdf2f68ccbc272 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 31 Jan 2022 13:05:25 -0500 Subject: [PATCH 078/125] Fix up new Django 3.0 deprecations Mostly text based: force/smart_text, ugettext_* --- awx/api/authentication.py | 6 +- awx/api/conf.py | 2 +- awx/api/exceptions.py | 4 +- awx/api/fields.py | 2 +- awx/api/filters.py | 8 +- awx/api/generics.py | 18 +- awx/api/metadata.py | 8 +- awx/api/parsers.py | 2 +- awx/api/serializers.py | 22 +- awx/api/views/__init__.py | 2 +- awx/api/views/inventory.py | 2 +- awx/api/views/mesh_visualizer.py | 2 +- awx/api/views/metrics.py | 2 +- awx/api/views/mixin.py | 2 +- awx/api/views/organization.py | 2 +- awx/api/views/root.py | 14 +- awx/api/views/webhooks.py | 2 +- awx/conf/apps.py | 2 +- awx/conf/conf.py | 2 +- awx/conf/fields.py | 2 +- awx/conf/registry.py | 2 +- awx/conf/tests/unit/test_registry.py | 2 +- 
awx/conf/tests/unit/test_settings.py | 2 +- awx/conf/views.py | 2 +- awx/main/access.py | 2 +- awx/main/analytics/collectors.py | 2 +- awx/main/apps.py | 2 +- awx/main/conf.py | 2 +- awx/main/constants.py | 2 +- awx/main/credential_plugins/aim.py | 2 +- awx/main/credential_plugins/azure_kv.py | 2 +- awx/main/credential_plugins/centrify_vault.py | 230 ++++++++--------- awx/main/credential_plugins/conjur.py | 2 +- awx/main/credential_plugins/dsv.py | 2 +- awx/main/credential_plugins/hashivault.py | 2 +- awx/main/credential_plugins/tss.py | 2 +- awx/main/fields.py | 6 +- .../management/commands/inventory_import.py | 8 +- awx/main/middleware.py | 2 +- awx/main/migrations/_inventory_source.py | 4 +- awx/main/migrations/_inventory_source_vars.py | 2 +- awx/main/models/activity_stream.py | 2 +- awx/main/models/ad_hoc_commands.py | 2 +- awx/main/models/base.py | 2 +- awx/main/models/credential/__init__.py | 232 +++++++++--------- awx/main/models/events.py | 6 +- awx/main/models/execution_environments.py | 2 +- awx/main/models/ha.py | 2 +- awx/main/models/inventory.py | 2 +- awx/main/models/jobs.py | 2 +- awx/main/models/label.py | 2 +- awx/main/models/mixins.py | 2 +- awx/main/models/notifications.py | 6 +- awx/main/models/oauth.py | 2 +- awx/main/models/organization.py | 2 +- awx/main/models/projects.py | 8 +- awx/main/models/rbac.py | 2 +- awx/main/models/schedules.py | 2 +- awx/main/models/unified_jobs.py | 8 +- awx/main/models/workflow.py | 2 +- awx/main/notifications/grafana_backend.py | 12 +- awx/main/notifications/irc_backend.py | 6 +- awx/main/notifications/mattermost_backend.py | 8 +- awx/main/notifications/pagerduty_backend.py | 8 +- awx/main/notifications/rocketchat_backend.py | 8 +- awx/main/notifications/slack_backend.py | 6 +- awx/main/notifications/twilio_backend.py | 8 +- awx/main/notifications/webhook_backend.py | 8 +- awx/main/scheduler/dag_workflow.py | 6 +- awx/main/scheduler/kubernetes.py | 2 +- awx/main/scheduler/task_manager.py | 2 +- 
awx/main/tasks/jobs.py | 2 +- awx/main/tasks/system.py | 2 +- .../tests/docs/test_swagger_generation.py | 6 +- .../tests/unit/scheduler/test_dag_workflow.py | 14 +- awx/main/utils/common.py | 2 +- awx/main/utils/licensing.py | 2 +- awx/main/validators.py | 2 +- awx/main/views.py | 2 +- awx/sso/apps.py | 2 +- awx/sso/backends.py | 4 +- awx/sso/conf.py | 2 +- awx/sso/fields.py | 2 +- awx/sso/models.py | 2 +- awx/sso/pipeline.py | 2 +- awx/sso/validators.py | 2 +- awx/sso/views.py | 6 +- awx/ui/apps.py | 2 +- awx/ui/conf.py | 2 +- awx/ui/fields.py | 2 +- awx/ui/urls.py | 2 +- 91 files changed, 406 insertions(+), 406 deletions(-) diff --git a/awx/api/authentication.py b/awx/api/authentication.py index 52b3462005..48fc00db44 100644 --- a/awx/api/authentication.py +++ b/awx/api/authentication.py @@ -6,7 +6,7 @@ import logging # Django from django.conf import settings -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str # Django REST Framework from rest_framework import authentication @@ -24,7 +24,7 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication): ret = super(LoggedBasicAuthentication, self).authenticate(request) if ret: username = ret[0].username if ret[0] else '' - logger.info(smart_text(u"User {} performed a {} to {} through the API".format(username, request.method, request.path))) + logger.info(smart_str(u"User {} performed a {} to {} through the API".format(username, request.method, request.path))) return ret def authenticate_header(self, request): @@ -45,7 +45,7 @@ class LoggedOAuth2Authentication(OAuth2Authentication): user, token = ret username = user.username if user else '' logger.info( - smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk)) + smart_str(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk)) ) setattr(user, 'oauth_scopes', [x 
for x in token.scope.split() if x]) return ret diff --git a/awx/api/conf.py b/awx/api/conf.py index 00c712a064..fd1467cdde 100644 --- a/awx/api/conf.py +++ b/awx/api/conf.py @@ -1,6 +1,6 @@ # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/api/exceptions.py b/awx/api/exceptions.py index 8f2c079583..406bd5e85f 100644 --- a/awx/api/exceptions.py +++ b/awx/api/exceptions.py @@ -2,7 +2,7 @@ # All Rights Reserved. # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ValidationError @@ -13,7 +13,7 @@ class ActiveJobConflict(ValidationError): def __init__(self, active_jobs): # During APIException.__init__(), Django Rest Framework - # turn everything in self.detail into string by using force_text. + # turn everything in self.detail into string by using force_str. # Declare detail afterwards circumvent this behavior. super(ActiveJobConflict, self).__init__() self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs} diff --git a/awx/api/fields.py b/awx/api/fields.py index 6f288f2bce..98c1bd8eac 100644 --- a/awx/api/fields.py +++ b/awx/api/fields.py @@ -2,7 +2,7 @@ # All Rights Reserved. 
# Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # Django REST Framework diff --git a/awx/api/filters.py b/awx/api/filters.py index 138478135b..2856e58f76 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -14,8 +14,8 @@ from django.db.models.fields import FieldDoesNotExist from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey -from django.utils.encoding import force_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import force_str +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ParseError, PermissionDenied @@ -185,7 +185,7 @@ class FieldLookupBackend(BaseFilterBackend): return (field_list[-1], new_lookup) def to_python_related(self, value): - value = force_text(value) + value = force_str(value) if value.lower() in ('none', 'null'): return None else: @@ -293,7 +293,7 @@ class FieldLookupBackend(BaseFilterBackend): search_filter_relation = 'AND' values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values]) for value in values: - search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_text(value)) + search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value)) assert isinstance(new_keys, list) search_filters[search_value] = new_keys # by definition, search *only* joins across relations, diff --git a/awx/api/generics.py b/awx/api/generics.py index 58ed5a9801..14ebde7fce 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -17,10 +17,10 @@ from django.db.models.fields.related import OneToOneRel from django.http import QueryDict from django.shortcuts import get_object_or_404 from 
django.template.loader import render_to_string -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.utils.safestring import mark_safe from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.contrib.auth import views as auth_views # Django REST Framework @@ -93,10 +93,10 @@ class LoggedLoginView(auth_views.LoginView): ret = super(LoggedLoginView, self).post(request, *args, **kwargs) current_user = getattr(request, 'user', None) if request.user.is_authenticated: - logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None)))) + logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None)))) ret.set_cookie('userLoggedIn', 'true') current_user = UserSerializer(self.request.user) - current_user = smart_text(JSONRenderer().render(current_user.data)) + current_user = smart_str(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') ret.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) @@ -104,7 +104,7 @@ class LoggedLoginView(auth_views.LoginView): return ret else: if 'username' in self.request.POST: - logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) + logger.warn(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) ret.status_code = 401 return ret @@ -392,8 +392,8 @@ class GenericAPIView(generics.GenericAPIView, APIView): if hasattr(self.model._meta, "verbose_name"): d.update( { - 'model_verbose_name': 
smart_text(self.model._meta.verbose_name), - 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural), + 'model_verbose_name': smart_str(self.model._meta.verbose_name), + 'model_verbose_name_plural': smart_str(self.model._meta.verbose_name_plural), } ) serializer = self.get_serializer() @@ -524,8 +524,8 @@ class SubListAPIView(ParentMixin, ListAPIView): d = super(SubListAPIView, self).get_description_context() d.update( { - 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name), - 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural), + 'parent_model_verbose_name': smart_str(self.parent_model._meta.verbose_name), + 'parent_model_verbose_name_plural': smart_str(self.parent_model._meta.verbose_name_plural), } ) return d diff --git a/awx/api/metadata.py b/awx/api/metadata.py index 5b8cf2ccb3..efc3f8b09e 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -9,8 +9,8 @@ from django.core.exceptions import PermissionDenied from django.db.models.fields import PositiveIntegerField, BooleanField from django.db.models.fields.related import ForeignKey from django.http import Http404 -from django.utils.encoding import force_text, smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import force_str, smart_str +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import exceptions @@ -53,7 +53,7 @@ class Metadata(metadata.SimpleMetadata): for attr in text_attrs: value = getattr(field, attr, None) if value is not None and value != '': - field_info[attr] = force_text(value, strings_only=True) + field_info[attr] = force_str(value, strings_only=True) placeholder = getattr(field, 'placeholder', serializers.empty) if placeholder is not serializers.empty: @@ -77,7 +77,7 @@ class Metadata(metadata.SimpleMetadata): } if field.field_name in field_help_text: opts = serializer.Meta.model._meta.concrete_model._meta 
- verbose_name = smart_text(opts.verbose_name) + verbose_name = smart_str(opts.verbose_name) field_info['help_text'] = field_help_text[field.field_name].format(verbose_name) if field.field_name == 'type': diff --git a/awx/api/parsers.py b/awx/api/parsers.py index ce18bce0af..ac06a35b81 100644 --- a/awx/api/parsers.py +++ b/awx/api/parsers.py @@ -5,7 +5,7 @@ import json # Django from django.conf import settings from django.utils.encoding import smart_str -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import parsers diff --git a/awx/api/serializers.py b/awx/api/serializers.py index ff8e654f55..8833f1587e 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -25,8 +25,8 @@ from django.contrib.auth.password_validation import validate_password as django_ from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist, ValidationError as DjangoValidationError from django.db import models -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import force_str from django.utils.text import capfirst from django.utils.timezone import now from django.utils.functional import cached_property @@ -357,7 +357,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl } choices = [] for t in self.get_types(): - name = _(type_name_map.get(t, force_text(get_model_for_type(t)._meta.verbose_name).title())) + name = _(type_name_map.get(t, force_str(get_model_for_type(t)._meta.verbose_name).title())) choices.append((t, name)) return choices @@ -645,7 +645,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl v2.extend(e) else: v2.append(e) - d[k] = list(map(force_text, v2)) + d[k] = list(map(force_str, v2)) raise 
ValidationError(d) return attrs @@ -1847,11 +1847,11 @@ class HostSerializer(BaseSerializerWithVariables): if port < 1 or port > 65535: raise ValueError except ValueError: - raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_text(port)) + raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_str(port)) return name, port def validate_name(self, value): - name = force_text(value or '') + name = force_str(value or '') # Validate here only, update in main validate method. host, port = self._get_host_port_from_name(name) return value @@ -1865,13 +1865,13 @@ class HostSerializer(BaseSerializerWithVariables): return vars_validate_or_raise(value) def validate(self, attrs): - name = force_text(attrs.get('name', self.instance and self.instance.name or '')) + name = force_str(attrs.get('name', self.instance and self.instance.name or '')) inventory = attrs.get('inventory', self.instance and self.instance.inventory or '') host, port = self._get_host_port_from_name(name) if port: attrs['name'] = host - variables = force_text(attrs.get('variables', self.instance and self.instance.variables or '')) + variables = force_str(attrs.get('variables', self.instance and self.instance.variables or '')) vars_dict = parse_yaml_or_json(variables) vars_dict['ansible_ssh_port'] = port attrs['variables'] = json.dumps(vars_dict) @@ -1944,7 +1944,7 @@ class GroupSerializer(BaseSerializerWithVariables): return res def validate(self, attrs): - name = force_text(attrs.get('name', self.instance and self.instance.name or '')) + name = force_str(attrs.get('name', self.instance and self.instance.name or '')) inventory = attrs.get('inventory', self.instance and self.instance.inventory or '') if Host.objects.filter(name=name, inventory=inventory).exists(): raise serializers.ValidationError(_('A Host with that name already exists.')) @@ -2838,8 +2838,8 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer): if not project: raise 
serializers.ValidationError({'project': _('This field is required.')}) playbook_not_found = bool( - (project and project.scm_type and (not project.allow_override) and playbook and force_text(playbook) not in project.playbook_files) - or (project and not project.scm_type and playbook and force_text(playbook) not in project.playbooks) # manual + (project and project.scm_type and (not project.allow_override) and playbook and force_str(playbook) not in project.playbook_files) + or (project and not project.scm_type and playbook and force_str(playbook) not in project.playbooks) # manual ) if playbook_not_found: raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 163fa4e727..8eba0c29ca 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -29,7 +29,7 @@ from django.views.decorators.csrf import csrf_exempt from django.template.loader import render_to_string from django.http import HttpResponse from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework diff --git a/awx/api/views/inventory.py b/awx/api/views/inventory.py index dfa7204f80..43815ae565 100644 --- a/awx/api/views/inventory.py +++ b/awx/api/views/inventory.py @@ -8,7 +8,7 @@ import logging from django.conf import settings from django.db.models import Q from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied diff --git a/awx/api/views/mesh_visualizer.py b/awx/api/views/mesh_visualizer.py index d2c04f0962..e790069700 100644 --- a/awx/api/views/mesh_visualizer.py +++ b/awx/api/views/mesh_visualizer.py @@ -1,7 +1,7 @@ # Copyright (c) 2018 Red Hat, Inc. 
# All Rights Reserved. -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.api.generics import APIView, Response from awx.api.permissions import IsSystemAdminOrAuditor diff --git a/awx/api/views/metrics.py b/awx/api/views/metrics.py index 5a37092dd4..1634293cab 100644 --- a/awx/api/views/metrics.py +++ b/awx/api/views/metrics.py @@ -5,7 +5,7 @@ import logging # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.response import Response diff --git a/awx/api/views/mixin.py b/awx/api/views/mixin.py index 059e1120f7..2ba254d3b3 100644 --- a/awx/api/views/mixin.py +++ b/awx/api/views/mixin.py @@ -8,7 +8,7 @@ from django.db.models import Count from django.db import transaction from django.shortcuts import get_object_or_404 from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.permissions import SAFE_METHODS from rest_framework.exceptions import PermissionDenied diff --git a/awx/api/views/organization.py b/awx/api/views/organization.py index 4a12a7d599..099edcadb0 100644 --- a/awx/api/views/organization.py +++ b/awx/api/views/organization.py @@ -7,7 +7,7 @@ import logging # Django from django.db.models import Count from django.contrib.contenttypes.models import ContentType -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.main.models import ( diff --git a/awx/api/views/root.py b/awx/api/views/root.py index 675daa2569..d879e4537e 100644 --- a/awx/api/views/root.py +++ b/awx/api/views/root.py @@ -8,11 +8,11 @@ import operator from collections import OrderedDict from django.conf import settings -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from 
django.utils.decorators import method_decorator from django.views.decorators.csrf import ensure_csrf_cookie from django.template.loader import render_to_string -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.response import Response @@ -205,7 +205,7 @@ class ApiV2SubscriptionView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) return Response(validated) @@ -246,7 +246,7 @@ class ApiV2AttachView(APIView): elif isinstance(exc, (ValueError, OSError)) and exc.args: msg = exc.args[0] else: - logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.exception(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) for sub in validated: if sub['pool_id'] == pool_id: @@ -322,7 +322,7 @@ class ApiV2ConfigView(APIView): try: data_actual = json.dumps(request.data) except Exception: - logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username)) + logger.info(smart_str(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) license_data = json.loads(data_actual) @@ -346,7 +346,7 @@ class ApiV2ConfigView(APIView): try: license_data_validated = get_licenser().license_from_manifest(license_data) except Exception: - logger.warning(smart_text(u"Invalid subscription submitted."), 
extra=dict(actor=request.user.username)) + logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) else: license_data_validated = get_licenser().validate() @@ -357,7 +357,7 @@ class ApiV2ConfigView(APIView): settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data_validated) - logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) + logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): diff --git a/awx/api/views/webhooks.py b/awx/api/views/webhooks.py index 921839a3f5..c3d1604b0a 100644 --- a/awx/api/views/webhooks.py +++ b/awx/api/views/webhooks.py @@ -4,7 +4,7 @@ import logging import urllib.parse from django.utils.encoding import force_bytes -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.decorators.csrf import csrf_exempt from rest_framework import status diff --git a/awx/conf/apps.py b/awx/conf/apps.py index a61e489858..b865c61948 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -2,7 +2,7 @@ from django.apps import AppConfig # from django.core import checks -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class ConfConfig(AppConfig): diff --git a/awx/conf/conf.py b/awx/conf/conf.py index 45a3442756..019bd1d068 100644 --- a/awx/conf/conf.py +++ b/awx/conf/conf.py @@ -1,6 +1,6 @@ # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import fields, register diff --git a/awx/conf/fields.py 
b/awx/conf/fields.py index 2ab3a9e8d9..889f71ca23 100644 --- a/awx/conf/fields.py +++ b/awx/conf/fields.py @@ -7,7 +7,7 @@ from collections import OrderedDict # Django from django.core.validators import URLValidator, _lazy_re_compile -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa diff --git a/awx/conf/registry.py b/awx/conf/registry.py index 36f6eba6d2..da056e99db 100644 --- a/awx/conf/registry.py +++ b/awx/conf/registry.py @@ -8,7 +8,7 @@ import logging # Django from django.core.exceptions import ImproperlyConfigured from django.utils.text import slugify -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.conf.license import get_license diff --git a/awx/conf/tests/unit/test_registry.py b/awx/conf/tests/unit/test_registry.py index 6a817985e2..1ce4dceaaf 100644 --- a/awx/conf/tests/unit/test_registry.py +++ b/awx/conf/tests/unit/test_registry.py @@ -6,7 +6,7 @@ from uuid import uuid4 from django.conf import LazySettings from django.core.cache.backends.locmem import LocMemCache from django.core.exceptions import ImproperlyConfigured -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from rest_framework.fields import empty import pytest diff --git a/awx/conf/tests/unit/test_settings.py b/awx/conf/tests/unit/test_settings.py index da97d41c6f..a184fa3191 100644 --- a/awx/conf/tests/unit/test_settings.py +++ b/awx/conf/tests/unit/test_settings.py @@ -11,7 +11,7 @@ import time from django.conf import LazySettings from django.core.cache.backends.locmem import LocMemCache from django.core.exceptions import ImproperlyConfigured -from django.utils.translation import ugettext_lazy as _ +from 
django.utils.translation import gettext_lazy as _ import pytest from awx.conf import models, fields diff --git a/awx/conf/views.py b/awx/conf/views.py index a9eae07409..b2b312d834 100644 --- a/awx/conf/views.py +++ b/awx/conf/views.py @@ -13,7 +13,7 @@ from socket import SHUT_RDWR from django.db import connection from django.conf import settings from django.http import Http404 -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework.exceptions import PermissionDenied diff --git a/awx/main/access.py b/awx/main/access.py index 06b560b9ae..1ddf794964 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -11,7 +11,7 @@ from functools import reduce from django.conf import settings from django.db.models import Q, Prefetch from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # Django REST Framework diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index 1270c87315..ee52dece89 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -10,7 +10,7 @@ from django.db.models import Count from django.conf import settings from django.contrib.sessions.models import Session from django.utils.timezone import now, timedelta -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from psycopg2.errors import UntranslatableCharacter diff --git a/awx/main/apps.py b/awx/main/apps.py index b45b3c20f2..abd3332fd0 100644 --- a/awx/main/apps.py +++ b/awx/main/apps.py @@ -1,5 +1,5 @@ from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class MainConfig(AppConfig): diff --git a/awx/main/conf.py b/awx/main/conf.py index 
c754ecc92a..0099fbe3ad 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -2,7 +2,7 @@ import logging # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/main/constants.py b/awx/main/constants.py index d87bf82983..cda6dd3a67 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -3,7 +3,7 @@ import re -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = [ 'CLOUD_PROVIDERS', diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py index 235511f959..95bf767508 100644 --- a/awx/main/credential_plugins/aim.py +++ b/awx/main/credential_plugins/aim.py @@ -2,7 +2,7 @@ from .plugin import CredentialPlugin, CertFiles, raise_for_status from urllib.parse import quote, urlencode, urljoin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import requests aim_inputs = { diff --git a/awx/main/credential_plugins/azure_kv.py b/awx/main/credential_plugins/azure_kv.py index 58580edf9a..eecfde65b1 100644 --- a/awx/main/credential_plugins/azure_kv.py +++ b/awx/main/credential_plugins/azure_kv.py @@ -1,6 +1,6 @@ from .plugin import CredentialPlugin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from azure.keyvault import KeyVaultClient, KeyVaultAuthentication from azure.common.credentials import ServicePrincipalCredentials from msrestazure import azure_cloud diff --git a/awx/main/credential_plugins/centrify_vault.py b/awx/main/credential_plugins/centrify_vault.py index a0be2250f4..1e05625e71 100644 --- a/awx/main/credential_plugins/centrify_vault.py +++ b/awx/main/credential_plugins/centrify_vault.py @@ -1,115 +1,115 @@ -from .plugin import CredentialPlugin, raise_for_status -from 
django.utils.translation import ugettext_lazy as _ -from urllib.parse import urljoin -import requests - -pas_inputs = { - 'fields': [ - { - 'id': 'url', - 'label': _('Centrify Tenant URL'), - 'type': 'string', - 'help_text': _('Centrify Tenant URL'), - 'format': 'url', - }, - { - 'id': 'client_id', - 'label': _('Centrify API User'), - 'type': 'string', - 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'), - }, - { - 'id': 'client_password', - 'label': _('Centrify API Password'), - 'type': 'string', - 'help_text': _('Password of Centrify API User with necessary permissions'), - 'secret': True, - }, - { - 'id': 'oauth_application_id', - 'label': _('OAuth2 Application ID'), - 'type': 'string', - 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), - 'default': 'awx', - }, - { - 'id': 'oauth_scope', - 'label': _('OAuth2 Scope'), - 'type': 'string', - 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), - 'default': 'awx', - }, - ], - 'metadata': [ - { - 'id': 'account-name', - 'label': _('Account Name'), - 'type': 'string', - 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. 
(root or DOMAIN/Administrator)'), - }, - { - 'id': 'system-name', - 'label': _('System Name'), - 'type': 'string', - 'help_text': _('Machine Name enrolled with in Centrify Portal'), - }, - ], - 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'], -} - - -# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret -def handle_auth(**kwargs): - post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']} - response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - return response.json()['access_token'] - except KeyError: - raise RuntimeError('OAuth request to tenant was unsuccessful') - - -# fetch the ID of system with RedRock query, Input : System Name, Account Name -def get_ID(**kwargs): - endpoint = urljoin(kwargs['url'], '/Redrock/query') - name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name']) - query = 'Select ID from VaultAccount where {0}'.format(name) - post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} - response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - result_str = response.json()["Result"]["Results"] - return result_str[0]["Row"]["ID"] - except (IndexError, KeyError): - raise RuntimeError("Error Detected!! 
Check the Inputs") - - -# CheckOut Password from Centrify Vault, Input : ID -def get_passwd(**kwargs): - endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword') - post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} - response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30)) - raise_for_status(response) - try: - return response.json()["Result"]["Password"] - except KeyError: - raise RuntimeError("Password Not Found") - - -def centrify_backend(**kwargs): - url = kwargs.get('url') - acc_name = kwargs.get('account-name') - system_name = kwargs.get('system-name') - client_id = kwargs.get('client_id') - client_password = kwargs.get('client_password') - app_id = kwargs.get('oauth_application_id', 'awx') - endpoint = urljoin(url, f'/oauth2/token/{app_id}') - endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')} - token = handle_auth(**endpoint) - get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token} - acc_id = get_ID(**get_id_args) - get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token} - return get_passwd(**get_pwd_args) - - -centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend) +from .plugin import CredentialPlugin, raise_for_status +from django.utils.translation import gettext_lazy as _ +from urllib.parse import urljoin +import requests + +pas_inputs = { + 'fields': [ + { + 'id': 'url', + 'label': _('Centrify Tenant URL'), + 'type': 'string', + 'help_text': _('Centrify Tenant URL'), + 'format': 'url', + }, + { + 'id': 'client_id', + 'label': _('Centrify API User'), + 'type': 'string', + 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'), + }, + { + 'id': 'client_password', + 'label': 
_('Centrify API Password'), + 'type': 'string', + 'help_text': _('Password of Centrify API User with necessary permissions'), + 'secret': True, + }, + { + 'id': 'oauth_application_id', + 'label': _('OAuth2 Application ID'), + 'type': 'string', + 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), + 'default': 'awx', + }, + { + 'id': 'oauth_scope', + 'label': _('OAuth2 Scope'), + 'type': 'string', + 'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), + 'default': 'awx', + }, + ], + 'metadata': [ + { + 'id': 'account-name', + 'label': _('Account Name'), + 'type': 'string', + 'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'), + }, + { + 'id': 'system-name', + 'label': _('System Name'), + 'type': 'string', + 'help_text': _('Machine Name enrolled with in Centrify Portal'), + }, + ], + 'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'], +} + + +# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret +def handle_auth(**kwargs): + post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']} + response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + return response.json()['access_token'] + except KeyError: + raise RuntimeError('OAuth request to tenant was unsuccessful') + + +# fetch the ID of system with RedRock query, Input : System Name, Account Name +def get_ID(**kwargs): + endpoint = urljoin(kwargs['url'], '/Redrock/query') + name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name']) + query = 'Select ID from VaultAccount where {0}'.format(name) + post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} + response = requests.post(endpoint, 
json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + result_str = response.json()["Result"]["Results"] + return result_str[0]["Row"]["ID"] + except (IndexError, KeyError): + raise RuntimeError("Error Detected!! Check the Inputs") + + +# CheckOut Password from Centrify Vault, Input : ID +def get_passwd(**kwargs): + endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword') + post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"} + response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30)) + raise_for_status(response) + try: + return response.json()["Result"]["Password"] + except KeyError: + raise RuntimeError("Password Not Found") + + +def centrify_backend(**kwargs): + url = kwargs.get('url') + acc_name = kwargs.get('account-name') + system_name = kwargs.get('system-name') + client_id = kwargs.get('client_id') + client_password = kwargs.get('client_password') + app_id = kwargs.get('oauth_application_id', 'awx') + endpoint = urljoin(url, f'/oauth2/token/{app_id}') + endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')} + token = handle_auth(**endpoint) + get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token} + acc_id = get_ID(**get_id_args) + get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token} + return get_passwd(**get_pwd_args) + + +centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend) diff --git a/awx/main/credential_plugins/conjur.py b/awx/main/credential_plugins/conjur.py index b9606d48bc..5ae6be27f3 100644 --- a/awx/main/credential_plugins/conjur.py +++ b/awx/main/credential_plugins/conjur.py @@ -3,7 +3,7 @@ from .plugin import CredentialPlugin, CertFiles, 
raise_for_status import base64 from urllib.parse import urljoin, quote -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ import requests diff --git a/awx/main/credential_plugins/dsv.py b/awx/main/credential_plugins/dsv.py index d256b27647..9c89199710 100644 --- a/awx/main/credential_plugins/dsv.py +++ b/awx/main/credential_plugins/dsv.py @@ -1,7 +1,7 @@ from .plugin import CredentialPlugin from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from thycotic.secrets.vault import SecretsVault diff --git a/awx/main/credential_plugins/hashivault.py b/awx/main/credential_plugins/hashivault.py index c179fcd1e7..c55ac2f4e6 100644 --- a/awx/main/credential_plugins/hashivault.py +++ b/awx/main/credential_plugins/hashivault.py @@ -6,7 +6,7 @@ from urllib.parse import urljoin from .plugin import CredentialPlugin, CertFiles, raise_for_status import requests -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ base_inputs = { 'fields': [ diff --git a/awx/main/credential_plugins/tss.py b/awx/main/credential_plugins/tss.py index bf83693860..172a8aef00 100644 --- a/awx/main/credential_plugins/tss.py +++ b/awx/main/credential_plugins/tss.py @@ -1,5 +1,5 @@ from .plugin import CredentialPlugin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from thycotic.secrets.server import PasswordGrantAuthorizer, SecretServer, ServerSecret diff --git a/awx/main/fields.py b/awx/main/fields.py index 71bf0c612f..1c470f11dd 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -28,9 +28,9 @@ from django.db.models.fields.related_descriptors import ( ReverseManyToOneDescriptor, create_forward_many_to_many_manager, ) -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from 
django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # jsonschema from jsonschema import Draft4Validator, FormatChecker @@ -144,7 +144,7 @@ def resolve_role_field(obj, field): # use extremely generous duck typing to accomidate all possible forms # of the model that may be used during various migrations if obj._meta.model_name != 'role' or obj._meta.app_label != 'main': - raise Exception(smart_text('{} refers to a {}, not a Role'.format(field, type(obj)))) + raise Exception(smart_str('{} refers to a {}, not a Role'.format(field, type(obj)))) ret.append(obj.id) else: if type(obj) is ManyToManyDescriptor: diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index 0854784f10..f710229e7e 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -16,7 +16,7 @@ from collections import OrderedDict from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.db import connection, transaction -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str # DRF error class to distinguish license exceptions from rest_framework.exceptions import PermissionDenied @@ -109,8 +109,8 @@ class AnsibleInventoryLoader(object): proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate() - stdout = smart_text(stdout) - stderr = smart_text(stderr) + stdout = smart_str(stdout) + stderr = smart_str(stderr) if proc.returncode != 0: raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % ('ansible-inventory', proc.returncode, stdout, stderr)) @@ -224,7 +224,7 @@ class Command(BaseCommand): from_dict = instance_id if instance_id: break - return smart_text(instance_id) + return smart_str(instance_id) def _get_enabled(self, 
from_dict, default=None): """ diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 39caf4a7e7..3bbc7975ed 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -14,7 +14,7 @@ from django.db import connection from django.shortcuts import redirect from django.apps import apps from django.utils.deprecation import MiddlewareMixin -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.urls import reverse, resolve from awx.main.utils.named_url_graph import generate_graph, GraphNode diff --git a/awx/main/migrations/_inventory_source.py b/awx/main/migrations/_inventory_source.py index e6a65a82d4..ef7cbb088c 100644 --- a/awx/main/migrations/_inventory_source.py +++ b/awx/main/migrations/_inventory_source.py @@ -1,6 +1,6 @@ import logging -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from awx.main.utils.common import set_current_apps from awx.main.utils.common import parse_yaml_or_json @@ -19,7 +19,7 @@ def _get_instance_id(from_dict, new_id, default=''): break instance_id = from_dict.get(key, default) from_dict = instance_id - return smart_text(instance_id) + return smart_str(instance_id) def _get_instance_id_for_upgrade(host, new_id): diff --git a/awx/main/migrations/_inventory_source_vars.py b/awx/main/migrations/_inventory_source_vars.py index 71c96403a6..12bad4e4b8 100644 --- a/awx/main/migrations/_inventory_source_vars.py +++ b/awx/main/migrations/_inventory_source_vars.py @@ -2,7 +2,7 @@ import json import re import logging -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.encoding import iri_to_uri diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 6215e17a5a..a2f68af27b 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -10,7 +10,7 @@ from awx.main.models.base import 
accepts_json from django.db import models from django.conf import settings from django.utils.encoding import smart_str -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = ['ActivityStream'] diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index f4065e473d..d0608bd652 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -9,7 +9,7 @@ from urllib.parse import urljoin from django.conf import settings from django.db import models from django.utils.text import Truncator -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ValidationError # AWX diff --git a/awx/main/models/base.py b/awx/main/models/base.py index 8cdd557a84..da12f603cb 100644 --- a/awx/main/models/base.py +++ b/awx/main/models/base.py @@ -4,7 +4,7 @@ # Django from django.db import models from django.core.exceptions import ValidationError, ObjectDoesNotExist -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.timezone import now # Django-Taggit diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index 88c0eedadd..53f6ffaa1f 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -15,9 +15,9 @@ from jinja2 import sandbox # Django from django.db import models -from django.utils.translation import ugettext_lazy as _, ugettext_noop +from django.utils.translation import gettext_lazy as _, gettext_noop from django.core.exceptions import ValidationError -from django.utils.encoding import force_text +from django.utils.encoding import force_str from django.utils.functional import cached_property from django.utils.timezone import now @@ -230,7 +230,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, 
ResourceMixin): def display_inputs(self): field_val = self.inputs.copy() for k, v in field_val.items(): - if force_text(v).startswith('$encrypted$'): + if force_str(v).startswith('$encrypted$'): field_val[k] = '$encrypted$' return field_val @@ -579,34 +579,34 @@ class ManagedCredentialType(SimpleNamespace): ManagedCredentialType( namespace='ssh', kind='ssh', - name=ugettext_noop('Machine'), + name=gettext_noop('Machine'), inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, - {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, { 'id': 'ssh_public_key_data', - 'label': ugettext_noop('Signed SSH Certificate'), + 'label': gettext_noop('Signed SSH Certificate'), 'type': 'string', 'multiline': True, 'secret': True, }, - {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, { 'id': 'become_method', - 'label': ugettext_noop('Privilege Escalation Method'), + 'label': gettext_noop('Privilege Escalation Method'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.' 
), }, { 'id': 'become_username', - 'label': ugettext_noop('Privilege Escalation Username'), + 'label': gettext_noop('Privilege Escalation Username'), 'type': 'string', }, - {'id': 'become_password', 'label': ugettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'become_password', 'label': gettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, ], }, ) @@ -614,14 +614,14 @@ ManagedCredentialType( ManagedCredentialType( namespace='scm', kind='scm', - name=ugettext_noop('Source Control'), + name=gettext_noop('Source Control'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True}, - {'id': 'ssh_key_data', 'label': ugettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, - {'id': 'ssh_key_unlock', 'label': ugettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True}, ], }, ) @@ -629,17 +629,17 @@ ManagedCredentialType( ManagedCredentialType( namespace='vault', kind='vault', - name=ugettext_noop('Vault'), + name=gettext_noop('Vault'), managed=True, inputs={ 'fields': [ - {'id': 'vault_password', 'label': ugettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'vault_password', 'label': gettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, { 
'id': 'vault_id', - 'label': ugettext_noop('Vault Identifier'), + 'label': gettext_noop('Vault Identifier'), 'type': 'string', 'format': 'vault_id', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Specify an (optional) Vault ID. This is ' 'equivalent to specifying the --vault-id ' 'Ansible parameter for providing multiple Vault ' @@ -655,32 +655,32 @@ ManagedCredentialType( ManagedCredentialType( namespace='net', kind='net', - name=ugettext_noop('Network'), + name=gettext_noop('Network'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, - {'id': 'ssh_key_data', 'label': ugettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, { 'id': 'ssh_key_unlock', - 'label': ugettext_noop('Private Key Passphrase'), + 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, }, { 'id': 'authorize', - 'label': ugettext_noop('Authorize'), + 'label': gettext_noop('Authorize'), 'type': 'boolean', }, { 'id': 'authorize_password', - 'label': ugettext_noop('Authorize Password'), + 'label': gettext_noop('Authorize Password'), 'type': 'string', 'secret': True, }, @@ -695,23 +695,23 @@ ManagedCredentialType( ManagedCredentialType( namespace='aws', kind='cloud', - name=ugettext_noop('Amazon Web Services'), + name=gettext_noop('Amazon Web Services'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Access Key'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Access Key'), 'type': 'string'}, { 'id': 'password', - 'label': 
ugettext_noop('Secret Key'), + 'label': gettext_noop('Secret Key'), 'type': 'string', 'secret': True, }, { 'id': 'security_token', - 'label': ugettext_noop('STS Token'), + 'label': gettext_noop('STS Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'Security Token Service (STS) is a web service ' 'that enables you to request temporary, ' 'limited-privilege credentials for AWS Identity ' @@ -726,38 +726,38 @@ ManagedCredentialType( ManagedCredentialType( namespace='openstack', kind='cloud', - name=ugettext_noop('OpenStack'), + name=gettext_noop('OpenStack'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password (API Key)'), + 'label': gettext_noop('Password (API Key)'), 'type': 'string', 'secret': True, }, { 'id': 'host', - 'label': ugettext_noop('Host (Authentication URL)'), + 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', - 'help_text': ugettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'), + 'help_text': gettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'), }, { 'id': 'project', - 'label': ugettext_noop('Project (Tenant Name)'), + 'label': gettext_noop('Project (Tenant Name)'), 'type': 'string', }, { 'id': 'project_domain_name', - 'label': ugettext_noop('Project (Domain Name)'), + 'label': gettext_noop('Project (Domain Name)'), 'type': 'string', }, { 'id': 'domain', - 'label': ugettext_noop('Domain Name'), + 'label': gettext_noop('Domain Name'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'OpenStack domains define administrative boundaries. ' 'It is only needed for Keystone v3 authentication ' 'URLs. 
Refer to the documentation for ' @@ -766,13 +766,13 @@ ManagedCredentialType( }, { 'id': 'region', - 'label': ugettext_noop('Region Name'), + 'label': gettext_noop('Region Name'), 'type': 'string', - 'help_text': ugettext_noop('For some cloud providers, like OVH, region must be specified'), + 'help_text': gettext_noop('For some cloud providers, like OVH, region must be specified'), }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, @@ -784,20 +784,20 @@ ManagedCredentialType( ManagedCredentialType( namespace='vmware', kind='cloud', - name=ugettext_noop('VMware vCenter'), + name=gettext_noop('VMware vCenter'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('VCenter Host'), + 'label': gettext_noop('VCenter Host'), 'type': 'string', - 'help_text': ugettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'), + 'help_text': gettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, @@ -809,20 +809,20 @@ ManagedCredentialType( ManagedCredentialType( namespace='satellite6', kind='cloud', - name=ugettext_noop('Red Hat Satellite 6'), + name=gettext_noop('Red Hat Satellite 6'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Satellite 6 URL'), + 'label': gettext_noop('Satellite 6 URL'), 'type': 'string', - 'help_text': ugettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'), + 'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. 
For example, https://satellite.example.org'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, @@ -834,21 +834,21 @@ ManagedCredentialType( ManagedCredentialType( namespace='gce', kind='cloud', - name=ugettext_noop('Google Compute Engine'), + name=gettext_noop('Google Compute Engine'), managed=True, inputs={ 'fields': [ { 'id': 'username', - 'label': ugettext_noop('Service Account Email Address'), + 'label': gettext_noop('Service Account Email Address'), 'type': 'string', - 'help_text': ugettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'), + 'help_text': gettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'), }, { 'id': 'project', 'label': 'Project', 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': gettext_noop( 'The Project ID is the GCE assigned identification. ' 'It is often constructed as three words or two words ' 'followed by a three-digit number. 
Examples: project-id-000 ' @@ -857,12 +857,12 @@ ManagedCredentialType( }, { 'id': 'ssh_key_data', - 'label': ugettext_noop('RSA Private Key'), + 'label': gettext_noop('RSA Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True, - 'help_text': ugettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'), + 'help_text': gettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'), }, ], 'required': ['username', 'ssh_key_data'], @@ -872,36 +872,36 @@ ManagedCredentialType( ManagedCredentialType( namespace='azure_rm', kind='cloud', - name=ugettext_noop('Microsoft Azure Resource Manager'), + name=gettext_noop('Microsoft Azure Resource Manager'), managed=True, inputs={ 'fields': [ { 'id': 'subscription', - 'label': ugettext_noop('Subscription ID'), + 'label': gettext_noop('Subscription ID'), 'type': 'string', - 'help_text': ugettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'), + 'help_text': gettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'), }, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, - {'id': 'client', 'label': ugettext_noop('Client ID'), 'type': 'string'}, + {'id': 'client', 'label': gettext_noop('Client ID'), 'type': 'string'}, { 'id': 'secret', - 'label': ugettext_noop('Client Secret'), + 'label': gettext_noop('Client Secret'), 'type': 'string', 'secret': True, }, - {'id': 'tenant', 'label': ugettext_noop('Tenant ID'), 'type': 'string'}, + {'id': 'tenant', 'label': gettext_noop('Tenant ID'), 'type': 'string'}, { 'id': 'cloud_environment', - 'label': ugettext_noop('Azure Cloud Environment'), + 'label': gettext_noop('Azure Cloud Environment'), 
'type': 'string', - 'help_text': ugettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'), + 'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'), }, ], 'required': ['subscription'], @@ -911,16 +911,16 @@ ManagedCredentialType( ManagedCredentialType( namespace='github_token', kind='token', - name=ugettext_noop('GitHub Personal Access Token'), + name=gettext_noop('GitHub Personal Access Token'), managed=True, inputs={ 'fields': [ { 'id': 'token', - 'label': ugettext_noop('Token'), + 'label': gettext_noop('Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('This token needs to come from your profile settings in GitHub'), + 'help_text': gettext_noop('This token needs to come from your profile settings in GitHub'), } ], 'required': ['token'], @@ -930,16 +930,16 @@ ManagedCredentialType( ManagedCredentialType( namespace='gitlab_token', kind='token', - name=ugettext_noop('GitLab Personal Access Token'), + name=gettext_noop('GitLab Personal Access Token'), managed=True, inputs={ 'fields': [ { 'id': 'token', - 'label': ugettext_noop('Token'), + 'label': gettext_noop('Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('This token needs to come from your profile settings in GitLab'), + 'help_text': gettext_noop('This token needs to come from your profile settings in GitLab'), } ], 'required': ['token'], @@ -949,12 +949,12 @@ ManagedCredentialType( ManagedCredentialType( namespace='insights', kind='insights', - name=ugettext_noop('Insights'), + name=gettext_noop('Insights'), managed=True, inputs={ 'fields': [ - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, - {'id': 'password', 'label': ugettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 
'secret': True}, ], 'required': ['username', 'password'], }, @@ -973,23 +973,23 @@ ManagedCredentialType( ManagedCredentialType( namespace='rhv', kind='cloud', - name=ugettext_noop('Red Hat Virtualization'), + name=gettext_noop('Red Hat Virtualization'), managed=True, inputs={ 'fields': [ - {'id': 'host', 'label': ugettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': ugettext_noop('The host to authenticate with.')}, - {'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'}, + {'id': 'host', 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': gettext_noop('The host to authenticate with.')}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, { 'id': 'ca_file', - 'label': ugettext_noop('CA File'), + 'label': gettext_noop('CA File'), 'type': 'string', - 'help_text': ugettext_noop('Absolute file path to the CA file to use (optional)'), + 'help_text': gettext_noop('Absolute file path to the CA file to use (optional)'), }, ], 'required': ['host', 'username', 'password'], @@ -1017,38 +1017,38 @@ ManagedCredentialType( ManagedCredentialType( namespace='controller', kind='cloud', - name=ugettext_noop('Red Hat Ansible Automation Platform'), + name=gettext_noop('Red Hat Ansible Automation Platform'), managed=True, inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Red Hat Ansible Automation Platform'), + 'label': gettext_noop('Red Hat Ansible Automation Platform'), 'type': 'string', - 'help_text': ugettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), + 'help_text': gettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), }, { 'id': 'username', - 'label': ugettext_noop('Username'), + 'label': gettext_noop('Username'), 'type': 'string', - 'help_text': ugettext_noop( + 'help_text': 
gettext_noop( 'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.' ), }, { 'id': 'password', - 'label': ugettext_noop('Password'), + 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, }, { 'id': 'oauth_token', - 'label': ugettext_noop('OAuth Token'), + 'label': gettext_noop('OAuth Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'), + 'help_text': gettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'), }, - {'id': 'verify_ssl', 'label': ugettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, + {'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, ], 'required': ['host'], }, @@ -1071,30 +1071,30 @@ ManagedCredentialType( ManagedCredentialType( namespace='kubernetes_bearer_token', kind='kubernetes', - name=ugettext_noop('OpenShift or Kubernetes API Bearer Token'), + name=gettext_noop('OpenShift or Kubernetes API Bearer Token'), inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('OpenShift or Kubernetes API Endpoint'), + 'label': gettext_noop('OpenShift or Kubernetes API Endpoint'), 'type': 'string', - 'help_text': ugettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), + 'help_text': gettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), }, { 'id': 'bearer_token', - 'label': ugettext_noop('API authentication bearer token'), + 'label': gettext_noop('API authentication bearer token'), 'type': 'string', 'secret': True, }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, { 'id': 'ssl_ca_cert', - 'label': ugettext_noop('Certificate Authority data'), + 'label': gettext_noop('Certificate 
Authority data'), 'type': 'string', 'secret': True, 'multiline': True, @@ -1107,31 +1107,31 @@ ManagedCredentialType( ManagedCredentialType( namespace='registry', kind='registry', - name=ugettext_noop('Container Registry'), + name=gettext_noop('Container Registry'), inputs={ 'fields': [ { 'id': 'host', - 'label': ugettext_noop('Authentication URL'), + 'label': gettext_noop('Authentication URL'), 'type': 'string', - 'help_text': ugettext_noop('Authentication endpoint for the container registry.'), + 'help_text': gettext_noop('Authentication endpoint for the container registry.'), 'default': 'quay.io', }, { 'id': 'username', - 'label': ugettext_noop('Username'), + 'label': gettext_noop('Username'), 'type': 'string', }, { 'id': 'password', - 'label': ugettext_noop('Password or Token'), + 'label': gettext_noop('Password or Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('A password or token used to authenticate with'), + 'help_text': gettext_noop('A password or token used to authenticate with'), }, { 'id': 'verify_ssl', - 'label': ugettext_noop('Verify SSL'), + 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'default': True, }, @@ -1144,27 +1144,27 @@ ManagedCredentialType( ManagedCredentialType( namespace='galaxy_api_token', kind='galaxy', - name=ugettext_noop('Ansible Galaxy/Automation Hub API Token'), + name=gettext_noop('Ansible Galaxy/Automation Hub API Token'), inputs={ 'fields': [ { 'id': 'url', - 'label': ugettext_noop('Galaxy Server URL'), + 'label': gettext_noop('Galaxy Server URL'), 'type': 'string', - 'help_text': ugettext_noop('The URL of the Galaxy instance to connect to.'), + 'help_text': gettext_noop('The URL of the Galaxy instance to connect to.'), }, { 'id': 'auth_url', - 'label': ugettext_noop('Auth Server URL'), + 'label': gettext_noop('Auth Server URL'), 'type': 'string', - 'help_text': ugettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'), + 'help_text': gettext_noop('The URL of a 
Keycloak server token_endpoint, if using ' 'SSO auth.'), }, { 'id': 'token', - 'label': ugettext_noop('API Token'), + 'label': gettext_noop('API Token'), 'type': 'string', 'secret': True, - 'help_text': ugettext_noop('A token to use for authentication against the Galaxy instance.'), + 'help_text': gettext_noop('A token to use for authentication against the Galaxy instance.'), }, ], 'required': ['url'], diff --git a/awx/main/models/events.py b/awx/main/models/events.py index f953e7ca61..0d4b60247b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -10,8 +10,8 @@ from django.db import models, DatabaseError, connection from django.utils.dateparse import parse_datetime from django.utils.text import Truncator from django.utils.timezone import utc, now -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import force_str from awx.api.versioning import reverse from awx.main import consumers @@ -396,7 +396,7 @@ class BasePlaybookEvent(CreatedModifiedModel): connection.on_commit(_send_notifications) for field in ('playbook', 'play', 'task', 'role'): - value = force_text(event_data.get(field, '')).strip() + value = force_str(event_data.get(field, '')).strip() if value != getattr(self, field): setattr(self, field, value) if settings.LOG_AGGREGATOR_ENABLED: diff --git a/awx/main/models/execution_environments.py b/awx/main/models/execution_environments.py index b0b3dd7579..55ce69098b 100644 --- a/awx/main/models/execution_environments.py +++ b/awx/main/models/execution_environments.py @@ -1,5 +1,5 @@ from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.api.versioning import reverse from awx.main.models.base import CommonModel diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index add2564015..36a3b7ce9e 100644 --- 
a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -9,7 +9,7 @@ from django.core.validators import MinValueValidator from django.db import models, connection from django.db.models.signals import post_save, post_delete from django.dispatch import receiver -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.conf import settings from django.utils.timezone import now, timedelta diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 0cac6602e0..7e278dd208 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -14,7 +14,7 @@ import yaml # Django from django.conf import settings from django.db import models, connection -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.db import transaction from django.core.exceptions import ValidationError from django.utils.timezone import now diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index e405c98596..c2bc72a7eb 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -19,7 +19,7 @@ from django.db import models # from django.core.cache import cache from django.utils.encoding import smart_str from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import FieldDoesNotExist # REST Framework diff --git a/awx/main/models/label.py b/awx/main/models/label.py index 18bdb2b025..7ca92d4ff2 100644 --- a/awx/main/models/label.py +++ b/awx/main/models/label.py @@ -3,7 +3,7 @@ # Django from django.db import models -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py index 45a3cae885..a5bb14b5a8 100644 --- 
a/awx/main/models/mixins.py +++ b/awx/main/models/mixins.py @@ -15,7 +15,7 @@ from django.core.exceptions import ValidationError from django.db import models from django.db.models.query import QuerySet from django.utils.crypto import get_random_string -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.main.models.base import prevent_search diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 860e591e2c..6520c86211 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -10,8 +10,8 @@ from django.db import models from django.conf import settings from django.core.mail.message import EmailMessage from django.db import connection -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_str, force_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str, force_str from jinja2 import sandbox, ChainableUndefined from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError @@ -187,7 +187,7 @@ class NotificationTemplate(CommonModelNameNotUnique): def display_notification_configuration(self): field_val = self.notification_configuration.copy() for field in self.notification_class.init_parameters: - if field in field_val and force_text(field_val[field]).startswith('$encrypted$'): + if field in field_val and force_str(field_val[field]).startswith('$encrypted$'): field_val[field] = '$encrypted$' return field_val diff --git a/awx/main/models/oauth.py b/awx/main/models/oauth.py index b9b4b8c217..c9927f78bd 100644 --- a/awx/main/models/oauth.py +++ b/awx/main/models/oauth.py @@ -6,7 +6,7 @@ import re from django.core.validators import RegexValidator from django.db import models, connection from django.utils.timezone import now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy 
as _ from django.conf import settings # Django OAuth Toolkit diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index 277b33315e..30a393d72b 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -8,7 +8,7 @@ from django.db import models from django.contrib.auth.models import User from django.contrib.sessions.models import Session from django.utils.timezone import now as tz_now -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index a2de97e34f..701d05d235 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -9,8 +9,8 @@ import urllib.parse as urlparse # Django from django.conf import settings from django.db import models -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_str, smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str from django.utils.text import slugify from django.core.exceptions import ValidationError from django.utils.timezone import now, make_aware, get_default_timezone @@ -214,7 +214,7 @@ class ProjectOptions(models.Model): for filename in filenames: playbook = could_be_playbook(project_path, dirpath, filename) if playbook is not None: - results.append(smart_text(playbook)) + results.append(smart_str(playbook)) return sorted(results, key=lambda x: smart_str(x).lower()) @property @@ -230,7 +230,7 @@ class ProjectOptions(models.Model): for filename in filenames: inv_path = could_be_inventory(project_path, dirpath, filename) if inv_path is not None: - results.append(smart_text(inv_path)) + results.append(smart_str(inv_path)) if len(results) > max_inventory_listing: break if len(results) > max_inventory_listing: diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index 485f70bd0d..8f54cc3e43 100644 --- 
a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -11,7 +11,7 @@ import re from django.db import models, transaction, connection from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index dca50d9232..9793c216ff 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -14,7 +14,7 @@ from dateutil.zoneinfo import get_zonefile_instance from django.db import models from django.db.models.query import QuerySet from django.utils.timezone import now, make_aware -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.api.versioning import reverse diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 489cba9799..ef374f82e6 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -19,9 +19,9 @@ from collections import OrderedDict from django.conf import settings from django.db import models, connection from django.core.exceptions import NON_FIELD_ERRORS -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.timezone import now -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from django.contrib.contenttypes.models import ContentType # REST Framework @@ -1090,7 +1090,7 @@ class UnifiedJob( # function assume a str-based fd will be returned; decode # .write() calls on the fly to maintain this interface _write = fd.write - fd.write = lambda s: _write(smart_text(s)) + fd.write = lambda s: _write(smart_str(s)) tbl = self._meta.db_table + 'event' created_by_cond = '' if self.has_unpartitioned_events: @@ 
-1273,7 +1273,7 @@ class UnifiedJob( id=self.id, name=self.name, url=self.get_ui_url(), - created_by=smart_text(self.created_by), + created_by=smart_str(self.created_by), started=self.started.isoformat() if self.started is not None else None, finished=self.finished.isoformat() if self.finished is not None else None, status=self.status, diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 684e25b967..f9a91aafa7 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -11,7 +11,7 @@ from urllib.parse import urljoin # Django from django.db import connection, models from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ObjectDoesNotExist # from django import settings as tower_settings diff --git a/awx/main/notifications/grafana_backend.py b/awx/main/notifications/grafana_backend.py index 4e9a7a6262..51a27a897e 100644 --- a/awx/main/notifications/grafana_backend.py +++ b/awx/main/notifications/grafana_backend.py @@ -7,8 +7,8 @@ import logging import requests import dateutil.parser as dp -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -82,9 +82,9 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): if m.body.get('finished'): grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000) except ValueError: - logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) + logger.error(smart_str(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'], 
m.body['finished']))) if not self.fail_silently: - raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) + raise Exception(smart_str(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'], m.body['finished']))) grafana_data['isRegion'] = self.isRegion grafana_data['dashboardId'] = self.dashboardId grafana_data['panelId'] = self.panelId @@ -97,8 +97,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase): "{}/api/annotations".format(m.recipients()[0]), json=grafana_data, headers=grafana_headers, verify=(not self.grafana_no_verify_ssl) ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification grafana: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification grafana: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification grafana: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index d020de824d..20a5523b52 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -7,8 +7,8 @@ import logging import irc.client -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -55,7 +55,7 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): connect_factory=connection_factory, ) except irc.client.ServerConnectionError as e: - logger.error(smart_text(_("Exception connecting to irc server: {}").format(e))) + 
logger.error(smart_str(_("Exception connecting to irc server: {}").format(e))) if not self.fail_silently: raise return True diff --git a/awx/main/notifications/mattermost_backend.py b/awx/main/notifications/mattermost_backend.py index b9cc513ba7..c96b3e9f54 100644 --- a/awx/main/notifications/mattermost_backend.py +++ b/awx/main/notifications/mattermost_backend.py @@ -4,8 +4,8 @@ import logging import requests -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -44,8 +44,8 @@ class MattermostBackend(AWXBaseEmailBackend, CustomNotificationBase): r = requests.post("{}".format(m.recipients()[0]), json=payload, verify=(not self.mattermost_no_verify_ssl)) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification mattermost: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification mattermost: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification mattermost: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py index 8cde9e3cfd..cfc3073ed4 100644 --- a/awx/main/notifications/pagerduty_backend.py +++ b/awx/main/notifications/pagerduty_backend.py @@ -5,8 +5,8 @@ import json import logging import pygerduty -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import 
AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -78,13 +78,13 @@ class PagerDutyBackend(AWXBaseEmailBackend, CustomNotificationBase): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e))) + logger.error(smart_str(_("Exception connecting to PagerDuty: {}").format(e))) for m in messages: try: pager.trigger_incident(m.recipients()[0], description=m.subject, details=m.body, client=m.from_email) sent_messages += 1 except Exception as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/rocketchat_backend.py b/awx/main/notifications/rocketchat_backend.py index 9092b90f17..67155233c7 100644 --- a/awx/main/notifications/rocketchat_backend.py +++ b/awx/main/notifications/rocketchat_backend.py @@ -5,8 +5,8 @@ import logging import requests import json -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.utils import get_awx_http_client_headers @@ -44,8 +44,8 @@ class RocketChatBackend(AWXBaseEmailBackend, CustomNotificationBase): ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification rocket.chat: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification rocket.chat: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification rocket.chat: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git 
a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index 73364dc037..d1016526aa 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -5,8 +5,8 @@ import logging from slack_sdk import WebClient from slack_sdk.errors import SlackApiError -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -53,7 +53,7 @@ class SlackBackend(AWXBaseEmailBackend, CustomNotificationBase): else: raise RuntimeError("Slack Notification unable to send {}: {} ({})".format(r, m.subject, response['error'])) except SlackApiError as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index 0b730a56b2..1f54d603ac 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -5,8 +5,8 @@ import logging from twilio.rest import Client -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.notifications.custom_notification_base import CustomNotificationBase @@ -37,14 +37,14 @@ class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text(_("Exception connecting to Twilio: {}").format(e))) + logger.error(smart_str(_("Exception connecting to 
Twilio: {}").format(e))) for m in messages: try: connection.messages.create(to=m.to, from_=m.from_email, body=m.subject) sent_messages += 1 except Exception as e: - logger.error(smart_text(_("Exception sending messages: {}").format(e))) + logger.error(smart_str(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index 342184ecf2..30518e0714 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -5,8 +5,8 @@ import json import logging import requests -from django.utils.encoding import smart_text -from django.utils.translation import ugettext_lazy as _ +from django.utils.encoding import smart_str +from django.utils.translation import gettext_lazy as _ from awx.main.notifications.base import AWXBaseEmailBackend from awx.main.utils import get_awx_http_client_headers @@ -76,8 +76,8 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase): verify=(not self.disable_ssl_verification), ) if r.status_code >= 400: - logger.error(smart_text(_("Error sending notification webhook: {}").format(r.status_code))) + logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code))) if not self.fail_silently: - raise Exception(smart_text(_("Error sending notification webhook: {}").format(r.status_code))) + raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code))) sent_messages += 1 return sent_messages diff --git a/awx/main/scheduler/dag_workflow.py b/awx/main/scheduler/dag_workflow.py index 39995f437c..c2afba68ad 100644 --- a/awx/main/scheduler/dag_workflow.py +++ b/awx/main/scheduler/dag_workflow.py @@ -1,5 +1,5 @@ -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str 
# Python from awx.main.models import ( @@ -171,7 +171,7 @@ class WorkflowDAG(SimpleDAG): parms['node_status'] = ",".join(["({},{})".format(id, status) for id, status in failed_path_nodes_id_status]) if len(failed_unified_job_template_node_ids) > 0: parms['no_ufjt'] = ",".join(failed_unified_job_template_node_ids) - return True, smart_text(s.format(**parms)) + return True, smart_str(s.format(**parms)) return False, None r''' diff --git a/awx/main/scheduler/kubernetes.py b/awx/main/scheduler/kubernetes.py index 6e36226df5..8566ca4864 100644 --- a/awx/main/scheduler/kubernetes.py +++ b/awx/main/scheduler/kubernetes.py @@ -7,7 +7,7 @@ from urllib import parse as urlparse from django.conf import settings from kubernetes import client, config from django.utils.functional import cached_property -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from awx.main.utils.common import parse_yaml_or_json, deepmerge from awx.main.utils.execution_environments import get_default_pod_spec diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index beb4840c9a..3b5e495d6b 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -10,7 +10,7 @@ from types import SimpleNamespace # Django from django.db import transaction, connection -from django.utils.translation import ugettext_lazy as _, gettext_noop +from django.utils.translation import gettext_lazy as _, gettext_noop from django.utils.timezone import now as tz_now from django.conf import settings diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index 7d2057d62d..9305b898f3 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -81,7 +81,7 @@ from awx.main.utils.handlers import SpecialInventoryHandler from awx.main.tasks.system import handle_success_and_failure_notifications, update_smart_memberships_for_inventory, update_inventory_computed_fields from awx.main.utils.update_model 
import update_model from rest_framework.exceptions import PermissionDenied -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ logger = logging.getLogger('awx.main.tasks.jobs') diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index 43ac6c2b26..ac9bb21e99 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -18,7 +18,7 @@ from django.db.models.fields.related import ForeignKey from django.utils.timezone import now from django.utils.encoding import smart_str from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_noop from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist diff --git a/awx/main/tests/docs/test_swagger_generation.py b/awx/main/tests/docs/test_swagger_generation.py index e1257cf889..658d8ad2d4 100644 --- a/awx/main/tests/docs/test_swagger_generation.py +++ b/awx/main/tests/docs/test_swagger_generation.py @@ -5,7 +5,7 @@ import re from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder from django.utils.functional import Promise -from django.utils.encoding import force_text +from django.utils.encoding import force_str from openapi_codec.encode import generate_swagger_object import pytest @@ -16,9 +16,9 @@ from awx.api.versioning import drf_reverse class i18nEncoder(DjangoJSONEncoder): def default(self, obj): if isinstance(obj, Promise): - return force_text(obj) + return force_str(obj) if type(obj) == bytes: - return force_text(obj) + return force_str(obj) return super(i18nEncoder, self).default(obj) diff --git a/awx/main/tests/unit/scheduler/test_dag_workflow.py b/awx/main/tests/unit/scheduler/test_dag_workflow.py index 18c3d193f7..a3225b76a3 100644 --- a/awx/main/tests/unit/scheduler/test_dag_workflow.py +++ 
b/awx/main/tests/unit/scheduler/test_dag_workflow.py @@ -2,8 +2,8 @@ import pytest import uuid import os -from django.utils.translation import ugettext_lazy as _ -from django.utils.encoding import smart_text +from django.utils.translation import gettext_lazy as _ +from django.utils.encoding import smart_str from awx.main.scheduler.dag_workflow import WorkflowDAG @@ -468,7 +468,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." @@ -484,7 +484,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]." ).format(nodes[2].id) @@ -500,7 +500,7 @@ class TestIsWorkflowDone: assert g.is_workflow_done() is True assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) []. Workflow job node(s) missing" " unified job template and error handling path [{}]." ).format(nodes[0].id) @@ -512,7 +512,7 @@ class TestIsWorkflowDone: assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." @@ -525,7 +525,7 @@ class TestIsWorkflowDone: assert g.has_workflow_failed() == ( True, - smart_text( + smart_str( _( "No error handling path for workflow job node(s) [({},{})]. Workflow job node(s)" " missing unified job template and error handling path []." 
diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 49885d70c7..a88113d2e4 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -19,7 +19,7 @@ from functools import reduce, wraps # Django from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist from django.utils.dateparse import parse_datetime -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.utils.functional import cached_property from django.db import connection from django.db.models.fields.related import ForeignObjectRel, ManyToManyField diff --git a/awx/main/utils/licensing.py b/awx/main/utils/licensing.py index eeae581655..bec953f822 100644 --- a/awx/main/utils/licensing.py +++ b/awx/main/utils/licensing.py @@ -33,7 +33,7 @@ from cryptography import x509 # Django from django.conf import settings -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ MAX_INSTANCES = 9999999 diff --git a/awx/main/validators.py b/awx/main/validators.py index 872eabafdc..4cd0e25459 100644 --- a/awx/main/validators.py +++ b/awx/main/validators.py @@ -6,7 +6,7 @@ import base64 import re # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.core.exceptions import ValidationError # REST framework diff --git a/awx/main/views.py b/awx/main/views.py index bb6c43b6bf..8ff612e8ba 100644 --- a/awx/main/views.py +++ b/awx/main/views.py @@ -7,7 +7,7 @@ import json from django.http import HttpResponse, HttpResponseRedirect from django.shortcuts import render from django.utils.html import format_html -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.decorators.csrf import csrf_exempt # Django REST Framework diff --git a/awx/sso/apps.py b/awx/sso/apps.py index 45c00e871b..4d09b7acf6 100644 --- 
a/awx/sso/apps.py +++ b/awx/sso/apps.py @@ -1,6 +1,6 @@ # Django from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class SSOConfig(AppConfig): diff --git a/awx/sso/backends.py b/awx/sso/backends.py index 727cacab20..20d6285c14 100644 --- a/awx/sso/backends.py +++ b/awx/sso/backends.py @@ -13,7 +13,7 @@ from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings as django_settings from django.core.signals import setting_changed -from django.utils.encoding import force_text +from django.utils.encoding import force_str # django-auth-ldap from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings @@ -200,7 +200,7 @@ class RADIUSBackend(BaseRADIUSBackend): return user def get_django_user(self, username, password=None): - return _get_or_set_enterprise_user(force_text(username), force_text(password), 'radius') + return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius') class TACACSPlusBackend(object): diff --git a/awx/sso/conf.py b/awx/sso/conf.py index 2faf342934..29d7f401d3 100644 --- a/awx/sso/conf.py +++ b/awx/sso/conf.py @@ -5,7 +5,7 @@ import urllib.parse as urlparse # Django from django.conf import settings from django.urls import reverse -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django REST Framework from rest_framework import serializers diff --git a/awx/sso/fields.py b/awx/sso/fields.py index 0e4af005c9..e2c87d97f2 100644 --- a/awx/sso/fields.py +++ b/awx/sso/fields.py @@ -11,7 +11,7 @@ import ldap import awx # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # Django Auth LDAP import django_auth_ldap.config diff --git a/awx/sso/models.py b/awx/sso/models.py index 95da1b82c7..28eb23857f 100644 --- a/awx/sso/models.py +++ 
b/awx/sso/models.py @@ -4,7 +4,7 @@ # Django from django.db import models from django.contrib.auth.models import User -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class UserEnterpriseAuth(models.Model): diff --git a/awx/sso/pipeline.py b/awx/sso/pipeline.py index 3a63391fe8..85bfd499fd 100644 --- a/awx/sso/pipeline.py +++ b/awx/sso/pipeline.py @@ -11,7 +11,7 @@ from social_core.exceptions import AuthException # Django from django.core.exceptions import ObjectDoesNotExist -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.db.models import Q diff --git a/awx/sso/validators.py b/awx/sso/validators.py index 821abc3b15..478b86b36f 100644 --- a/awx/sso/validators.py +++ b/awx/sso/validators.py @@ -6,7 +6,7 @@ import ldap # Django from django.core.exceptions import ValidationError -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ __all__ = [ 'validate_ldap_dn', diff --git a/awx/sso/views.py b/awx/sso/views.py index 2f3a448af9..67921b2fa4 100644 --- a/awx/sso/views.py +++ b/awx/sso/views.py @@ -10,7 +10,7 @@ from django.urls import reverse from django.http import HttpResponse from django.views.generic import View from django.views.generic.base import RedirectView -from django.utils.encoding import smart_text +from django.utils.encoding import smart_str from awx.api.serializers import UserSerializer from rest_framework.renderers import JSONRenderer from django.conf import settings @@ -40,10 +40,10 @@ class CompleteView(BaseRedirectView): def dispatch(self, request, *args, **kwargs): response = super(CompleteView, self).dispatch(request, *args, **kwargs) if self.request.user and self.request.user.is_authenticated: - logger.info(smart_text(u"User {} logged in".format(self.request.user.username))) + logger.info(smart_str(u"User {} logged 
in".format(self.request.user.username))) response.set_cookie('userLoggedIn', 'true') current_user = UserSerializer(self.request.user) - current_user = smart_text(JSONRenderer().render(current_user.data)) + current_user = smart_str(JSONRenderer().render(current_user.data)) current_user = urllib.parse.quote('%s' % current_user, '') response.set_cookie('current_user', current_user, secure=settings.SESSION_COOKIE_SECURE or None) response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid')) diff --git a/awx/ui/apps.py b/awx/ui/apps.py index 40943c6f53..d567e64b80 100644 --- a/awx/ui/apps.py +++ b/awx/ui/apps.py @@ -1,6 +1,6 @@ # Django from django.apps import AppConfig -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ class UIConfig(AppConfig): diff --git a/awx/ui/conf.py b/awx/ui/conf.py index 34208f2339..9f1cef04fc 100644 --- a/awx/ui/conf.py +++ b/awx/ui/conf.py @@ -2,7 +2,7 @@ # All Rights Reserved. 
# Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import register, fields diff --git a/awx/ui/fields.py b/awx/ui/fields.py index d9b46890ff..2200de3417 100644 --- a/awx/ui/fields.py +++ b/awx/ui/fields.py @@ -7,7 +7,7 @@ import binascii import re # Django -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ # AWX from awx.conf import fields diff --git a/awx/ui/urls.py b/awx/ui/urls.py index 7d524d82be..068f2a020a 100644 --- a/awx/ui/urls.py +++ b/awx/ui/urls.py @@ -1,5 +1,5 @@ from django.conf.urls import url -from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import gettext_lazy as _ from django.views.generic.base import TemplateView from awx.main.utils.licensing import server_product_name From b852baaa39035b4836b1b5498dc702bf0f3bbd9b Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 31 Jan 2022 13:17:45 -0500 Subject: [PATCH 079/125] Fix up logger .warn() calls to use .warning() instead This is a usage that was deprecated in Python 3.0. 
--- awx/api/generics.py | 2 +- awx/api/serializers.py | 2 +- awx/main/analytics/broadcast_websocket.py | 2 +- awx/main/consumers.py | 4 ++-- awx/main/dispatch/control.py | 2 +- awx/main/dispatch/periodic.py | 4 ++-- awx/main/dispatch/pool.py | 6 ++--- awx/main/dispatch/worker/base.py | 6 ++--- .../management/commands/inventory_import.py | 4 ++-- awx/main/managers.py | 2 +- .../0150_rename_inv_sources_inv_updates.py | 10 ++++----- awx/main/migrations/_hg_removal.py | 2 +- awx/main/migrations/_inventory_source.py | 4 ++-- awx/main/models/ha.py | 2 +- awx/main/models/notifications.py | 2 +- awx/main/models/schedules.py | 2 +- awx/main/models/unified_jobs.py | 4 ++-- awx/main/routing.py | 2 +- awx/main/scheduler/task_manager.py | 2 +- awx/main/tasks/callback.py | 2 +- awx/main/tasks/jobs.py | 2 +- awx/main/tasks/receptor.py | 8 +++---- awx/main/tasks/system.py | 22 +++++++++---------- awx/main/tests/unit/test_tasks.py | 2 +- awx/main/utils/reload.py | 2 +- awx/main/wsbroadcast.py | 18 +++++++-------- awx/sso/backends.py | 6 ++--- .../test_get_or_set_enterprise_user.py | 4 ++-- 28 files changed, 65 insertions(+), 65 deletions(-) diff --git a/awx/api/generics.py b/awx/api/generics.py index 14ebde7fce..c3c72ba30a 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -104,7 +104,7 @@ class LoggedLoginView(auth_views.LoginView): return ret else: if 'username' in self.request.POST: - logger.warn(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) + logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None)))) ret.status_code = 401 return ret diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 8833f1587e..4cd4f01fb6 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -5078,7 +5078,7 @@ class ActivityStreamSerializer(BaseSerializer): try: return json.loads(obj.changes) except 
Exception: - logger.warn("Error deserializing activity stream json changes") + logger.warning("Error deserializing activity stream json changes") return {} def get_object_association(self, obj): diff --git a/awx/main/analytics/broadcast_websocket.py b/awx/main/analytics/broadcast_websocket.py index ff4bcb4fa1..df1582c9b9 100644 --- a/awx/main/analytics/broadcast_websocket.py +++ b/awx/main/analytics/broadcast_websocket.py @@ -89,7 +89,7 @@ class BroadcastWebsocketStatsManager: await asyncio.sleep(settings.BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS) except Exception as e: - logger.warn(e) + logger.warning(e) await asyncio.sleep(settings.BROADCAST_WEBSOCKET_STATS_POLL_RATE_SECONDS) self.start() diff --git a/awx/main/consumers.py b/awx/main/consumers.py index 21ebe9d771..ad1740c362 100644 --- a/awx/main/consumers.py +++ b/awx/main/consumers.py @@ -65,7 +65,7 @@ class WebsocketSecretAuthHelper: nonce_parsed = int(nonce_parsed) nonce_diff = now - nonce_parsed if abs(nonce_diff) > nonce_tolerance: - logger.warn(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") + logger.warning(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") raise ValueError(f"Potential replay attack or machine(s) time out of sync by {nonce_diff} seconds.") return True @@ -85,7 +85,7 @@ class BroadcastConsumer(AsyncJsonWebsocketConsumer): try: WebsocketSecretAuthHelper.is_authorized(self.scope) except Exception: - logger.warn(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.") + logger.warning(f"client '{self.channel_name}' failed to authorize against the broadcast endpoint.") await self.close() return diff --git a/awx/main/dispatch/control.py b/awx/main/dispatch/control.py index e5952f02bf..b1eb2281c9 100644 --- a/awx/main/dispatch/control.py +++ b/awx/main/dispatch/control.py @@ -42,7 +42,7 @@ class Control(object): return f"reply_to_{str(uuid.uuid4()).replace('-','_')}" def control_with_reply(self, 
command, timeout=5): - logger.warn('checking {} {} for {}'.format(self.service, command, self.queuename)) + logger.warning('checking {} {} for {}'.format(self.service, command, self.queuename)) reply_queue = Control.generate_reply_queue_name() self.result = None diff --git a/awx/main/dispatch/periodic.py b/awx/main/dispatch/periodic.py index 9ff6dd2570..bfeff05fca 100644 --- a/awx/main/dispatch/periodic.py +++ b/awx/main/dispatch/periodic.py @@ -19,13 +19,13 @@ class Scheduler(Scheduler): def run(): ppid = os.getppid() - logger.warn('periodic beat started') + logger.warning('periodic beat started') while True: if os.getppid() != ppid: # if the parent PID changes, this process has been orphaned # via e.g., segfault or sigkill, we should exit too pid = os.getpid() - logger.warn(f'periodic beat exiting gracefully pid:{pid}') + logger.warning(f'periodic beat exiting gracefully pid:{pid}') raise SystemExit() try: for conn in connections.all(): diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py index f1f46363f3..21b1e6b9be 100644 --- a/awx/main/dispatch/pool.py +++ b/awx/main/dispatch/pool.py @@ -142,7 +142,7 @@ class PoolWorker(object): # when this occurs, it's _fine_ to ignore this KeyError because # the purpose of self.managed_tasks is to just track internal # state of which events are *currently* being processed. 
- logger.warn('Event UUID {} appears to be have been duplicated.'.format(uuid)) + logger.warning('Event UUID {} appears to be have been duplicated.'.format(uuid)) @property def current_task(self): @@ -291,8 +291,8 @@ class WorkerPool(object): pass except Exception: tb = traceback.format_exc() - logger.warn("could not write to queue %s" % preferred_queue) - logger.warn("detail: {}".format(tb)) + logger.warning("could not write to queue %s" % preferred_queue) + logger.warning("detail: {}".format(tb)) write_attempt_order.append(preferred_queue) logger.error("could not write payload to any queue, attempted order: {}".format(write_attempt_order)) return None diff --git a/awx/main/dispatch/worker/base.py b/awx/main/dispatch/worker/base.py index 193fb778bb..6965416c94 100644 --- a/awx/main/dispatch/worker/base.py +++ b/awx/main/dispatch/worker/base.py @@ -60,7 +60,7 @@ class AWXConsumerBase(object): return f'listening on {self.queues}' def control(self, body): - logger.warn(f'Received control signal:\n{body}') + logger.warning(f'Received control signal:\n{body}') control = body.get('control') if control in ('status', 'running'): reply_queue = body['reply_to'] @@ -118,7 +118,7 @@ class AWXConsumerBase(object): def stop(self, signum, frame): self.should_stop = True - logger.warn('received {}, stopping'.format(signame(signum))) + logger.warning('received {}, stopping'.format(signame(signum))) self.worker.on_stop() raise SystemExit() @@ -153,7 +153,7 @@ class AWXConsumerPG(AWXConsumerBase): if self.should_stop: return except psycopg2.InterfaceError: - logger.warn("Stale Postgres message bus connection, reconnecting") + logger.warning("Stale Postgres message bus connection, reconnecting") continue diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index f710229e7e..78acec423d 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -79,13 +79,13 @@ class 
AnsibleInventoryLoader(object): ee = get_default_execution_environment() if settings.IS_K8S: - logger.warn('This command is not able to run on kubernetes-based deployment. This action should be done using the API.') + logger.warning('This command is not able to run on kubernetes-based deployment. This action should be done using the API.') sys.exit(1) if ee.credential: process = subprocess.run(['podman', 'image', 'exists', ee.image], capture_output=True) if process.returncode != 0: - logger.warn( + logger.warning( f'The default execution environment (id={ee.id}, name={ee.name}, image={ee.image}) is not available on this node. ' 'The image needs to be available locally before using this command, due to registry authentication. ' 'To pull this image, either run a job on this node or manually pull the image.' diff --git a/awx/main/managers.py b/awx/main/managers.py index 2614193fe1..4d58f7bc55 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -247,7 +247,7 @@ class InstanceGroupManager(models.Manager): if t.controller_node: control_groups = instance_ig_mapping.get(t.controller_node, []) if not control_groups: - logger.warn(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.") + logger.warning(f"No instance group found for {t.controller_node}, capacity consumed may be innaccurate.") if t.status == 'waiting' or (not t.execution_node and not t.is_container_group_task): # Subtract capacity from any peer groups that share instances diff --git a/awx/main/migrations/0150_rename_inv_sources_inv_updates.py b/awx/main/migrations/0150_rename_inv_sources_inv_updates.py index 11c4b1b3f9..596d1f81f2 100644 --- a/awx/main/migrations/0150_rename_inv_sources_inv_updates.py +++ b/awx/main/migrations/0150_rename_inv_sources_inv_updates.py @@ -15,10 +15,10 @@ def forwards(apps, schema_editor): r = InventoryUpdate.objects.filter(source='tower').update(source='controller') if r: - logger.warn(f'Renamed {r} tower inventory updates to 
controller') + logger.warning(f'Renamed {r} tower inventory updates to controller') InventorySource.objects.filter(source='tower').update(source='controller') if r: - logger.warn(f'Renamed {r} tower inventory sources to controller') + logger.warning(f'Renamed {r} tower inventory sources to controller') CredentialType = apps.get_model('main', 'CredentialType') @@ -32,7 +32,7 @@ def forwards(apps, schema_editor): registry_type = ManagedCredentialType.registry.get('controller') if not registry_type: raise RuntimeError('Excpected to find controller credential, this may need to be edited in the future!') - logger.warn('Renaming the Ansible Tower credential type for existing install') + logger.warning('Renaming the Ansible Tower credential type for existing install') tower_type.name = registry_type.name # sensitive to translations tower_type.namespace = 'controller' # if not done, will error setup_tower_managed_defaults tower_type.save(update_fields=['name', 'namespace']) @@ -46,10 +46,10 @@ def backwards(apps, schema_editor): r = InventoryUpdate.objects.filter(source='controller').update(source='tower') if r: - logger.warn(f'Renamed {r} controller inventory updates to tower') + logger.warning(f'Renamed {r} controller inventory updates to tower') r = InventorySource.objects.filter(source='controller').update(source='tower') if r: - logger.warn(f'Renamed {r} controller inventory sources to tower') + logger.warning(f'Renamed {r} controller inventory sources to tower') CredentialType = apps.get_model('main', 'CredentialType') diff --git a/awx/main/migrations/_hg_removal.py b/awx/main/migrations/_hg_removal.py index e384ea5413..76828ef474 100644 --- a/awx/main/migrations/_hg_removal.py +++ b/awx/main/migrations/_hg_removal.py @@ -14,4 +14,4 @@ def delete_hg_scm(apps, schema_editor): update_ct = Project.objects.filter(scm_type='hg').update(scm_type='') if update_ct: - logger.warn('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct)) + 
logger.warning('Changed {} mercurial projects to manual, deprecation period ended'.format(update_ct)) diff --git a/awx/main/migrations/_inventory_source.py b/awx/main/migrations/_inventory_source.py index ef7cbb088c..023a7ee072 100644 --- a/awx/main/migrations/_inventory_source.py +++ b/awx/main/migrations/_inventory_source.py @@ -35,7 +35,7 @@ def _get_instance_id_for_upgrade(host, new_id): return None if len(new_id) > 255: # this should never happen - logger.warn('Computed instance id "{}"" for host {}-{} is too long'.format(new_id_value, host.name, host.pk)) + logger.warning('Computed instance id "{}"" for host {}-{} is too long'.format(new_id_value, host.name, host.pk)) return None return new_id_value @@ -47,7 +47,7 @@ def set_new_instance_id(apps, source, new_id): id_from_settings = getattr(settings, '{}_INSTANCE_ID_VAR'.format(source.upper())) if id_from_settings != new_id: # User applied an instance ID themselves, so nope on out of there - logger.warn('You have an instance ID set for {}, not migrating'.format(source)) + logger.warning('You have an instance ID set for {}, not migrating'.format(source)) return logger.debug('Migrating inventory instance_id for {} to {}'.format(source, new_id)) Host = apps.get_model('main', 'Host') diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index 36a3b7ce9e..6182b2ce7e 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -247,7 +247,7 @@ class Instance(HasPolicyEditsMixin, BaseModel): if uuid is not None and self.uuid != uuid: if self.uuid is not None: - logger.warn(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}') + logger.warning(f'Self-reported uuid of {self.hostname} changed from {self.uuid} to {uuid}') self.uuid = uuid update_fields.append('uuid') diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index 6520c86211..d73591070c 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -515,7 +515,7 @@ class 
JobNotificationMixin(object): try: notification_templates = self.get_notification_templates() except Exception: - logger.warn("No notification template defined for emitting notification") + logger.warning("No notification template defined for emitting notification") return if not notification_templates: diff --git a/awx/main/models/schedules.py b/awx/main/models/schedules.py index 9793c216ff..c3fae526f1 100644 --- a/awx/main/models/schedules.py +++ b/awx/main/models/schedules.py @@ -103,7 +103,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig): for zone in all_zones: if fname.endswith(zone): return zone - logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule)) + logger.warning('Could not detect valid zoneinfo for {}'.format(self.rrule)) return '' @property diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index ef374f82e6..f08e37e06c 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -357,7 +357,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn validated_kwargs = kwargs.copy() if unallowed_fields: if parent_field_name is None: - logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self)) + logger.warning('Fields {} are not allowed as overrides to spawn from {}.'.format(', '.join(unallowed_fields), self)) for f in unallowed_fields: validated_kwargs.pop(f) @@ -1205,7 +1205,7 @@ class UnifiedJob( try: extra_data_dict = parse_yaml_or_json(extra_data, silent_failure=False) except Exception as e: - logger.warn("Exception deserializing extra vars: " + str(e)) + logger.warning("Exception deserializing extra vars: " + str(e)) evars = self.extra_vars_dict evars.update(extra_data_dict) self.update_fields(extra_vars=json.dumps(evars)) diff --git a/awx/main/routing.py b/awx/main/routing.py index 6ba58e68c6..f470541443 100644 --- a/awx/main/routing.py +++ b/awx/main/routing.py @@ -21,7 +21,7 @@ class 
AWXProtocolTypeRouter(ProtocolTypeRouter): logger.debug(f"cleaning up Redis key {k}") r.delete(k) except redis.exceptions.RedisError as e: - logger.warn("encountered an error communicating with redis.") + logger.warning("encountered an error communicating with redis.") raise e super().__init__(*args, **kwargs) diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 3b5e495d6b..fba17917cf 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -574,7 +574,7 @@ class TaskManager: timeout_message = _("The approval node {name} ({pk}) has expired after {timeout} seconds.").format( name=task.name, pk=task.pk, timeout=task.timeout ) - logger.warn(timeout_message) + logger.warning(timeout_message) task.timed_out = True task.status = 'failed' task.send_approval_notification('timed_out') diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py index ccd9c39815..d9b04c0b15 100644 --- a/awx/main/tasks/callback.py +++ b/awx/main/tasks/callback.py @@ -154,7 +154,7 @@ class RunnerCallback: if self.instance.cancel_flag or self.instance.status == 'canceled': cancel_wait = (now() - self.instance.modified).seconds if self.instance.modified else 0 if cancel_wait > 5: - logger.warn('Request to cancel {} took {} seconds to complete.'.format(self.instance.log_format, cancel_wait)) + logger.warning('Request to cancel {} took {} seconds to complete.'.format(self.instance.log_format, cancel_wait)) return True return False diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py index 9305b898f3..6fb1613f0f 100644 --- a/awx/main/tasks/jobs.py +++ b/awx/main/tasks/jobs.py @@ -169,7 +169,7 @@ class BaseTask(object): # mount_option validation via performed via API, but since this can be overriden via settings.py if mount_option not in CONTAINER_VOLUMES_MOUNT_TYPES: mount_option = 'z' - logger.warn(f'The path {this_path} has volume mount type {mount_option} which is not supported. 
Using "z" instead.') + logger.warning(f'The path {this_path} has volume mount type {mount_option} which is not supported. Using "z" instead.') params['container_volume_mounts'].append(f'{src}:{dest}:{mount_option}') elif this_path.count(':') == MAX_ISOLATED_PATH_COLON_DELIMITER - 1: diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 5d58217b6f..c2028dbb36 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -164,7 +164,7 @@ def run_until_complete(node, timing_data=None, **kwargs): if settings.RECEPTOR_RELEASE_WORK: res = receptor_ctl.simple_command(f"work release {unit_id}") if res != {'released': unit_id}: - logger.warn(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}') + logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}') receptor_ctl.close() @@ -358,9 +358,9 @@ class AWXReceptorJob: logger.exception(f'An error was encountered while getting status for work unit {self.unit_id}') if 'exceeded quota' in detail: - logger.warn(detail) + logger.warning(detail) log_name = self.task.instance.log_format - logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.") + logger.warning(f"Could not launch pod for {log_name}. 
Exceeded quota.") self.task.update_model(self.task.instance.pk, status='pending') return # If ansible-runner ran, but an error occured at runtime, the traceback information @@ -380,7 +380,7 @@ class AWXReceptorJob: self.task.instance.result_traceback = detail self.task.instance.save(update_fields=['result_traceback']) else: - logger.warn(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}') + logger.warning(f'No result details or output from {self.task.instance.log_format}, status:\n{state_name}') except Exception: raise RuntimeError(detail) diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index ac9bb21e99..4e1b9eceed 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -374,15 +374,15 @@ def cluster_node_health_check(node): Used for the health check endpoint, refreshes the status of the instance, but must be ran on target node """ if node == '': - logger.warn('Local health check incorrectly called with blank string') + logger.warning('Local health check incorrectly called with blank string') return elif node != settings.CLUSTER_HOST_ID: - logger.warn(f'Local health check for {node} incorrectly sent to {settings.CLUSTER_HOST_ID}') + logger.warning(f'Local health check for {node} incorrectly sent to {settings.CLUSTER_HOST_ID}') return try: this_inst = Instance.objects.me() except Instance.DoesNotExist: - logger.warn(f'Instance record for {node} missing, could not check capacity.') + logger.warning(f'Instance record for {node} missing, could not check capacity.') return this_inst.local_health_check() @@ -390,12 +390,12 @@ def cluster_node_health_check(node): @task(queue=get_local_queuename) def execution_node_health_check(node): if node == '': - logger.warn('Remote health check incorrectly called with blank string') + logger.warning('Remote health check incorrectly called with blank string') return try: instance = Instance.objects.get(hostname=node) except Instance.DoesNotExist: - 
logger.warn(f'Instance record for {node} missing, could not check capacity.') + logger.warning(f'Instance record for {node} missing, could not check capacity.') return if instance.node_type != 'execution': @@ -416,7 +416,7 @@ def execution_node_health_check(node): if data['errors']: formatted_error = "\n".join(data["errors"]) if prior_capacity: - logger.warn(f'Health check marking execution node {node} as lost, errors:\n{formatted_error}') + logger.warning(f'Health check marking execution node {node} as lost, errors:\n{formatted_error}') else: logger.info(f'Failed to find capacity of new or lost execution node {node}, errors:\n{formatted_error}') else: @@ -441,7 +441,7 @@ def inspect_execution_nodes(instance_list): if hostname in node_lookup: instance = node_lookup[hostname] else: - logger.warn(f"Unrecognized node advertising on mesh: {hostname}") + logger.warning(f"Unrecognized node advertising on mesh: {hostname}") continue # Control-plane nodes are dealt with via local_health_check instead. @@ -466,7 +466,7 @@ def inspect_execution_nodes(instance_list): # if the instance *was* lost, but has appeared again, # attempt to re-establish the initial capacity and version # check - logger.warn(f'Execution node attempting to rejoin as instance {hostname}.') + logger.warning(f'Execution node attempting to rejoin as instance {hostname}.') execution_node_health_check.apply_async([hostname]) elif instance.capacity == 0 and instance.enabled: # nodes with proven connection but need remediation run health checks are reduced frequency @@ -640,7 +640,7 @@ def awx_periodic_scheduler(): template = schedule.unified_job_template schedule.update_computed_fields() # To update next_run timestamp. 
if template.cache_timeout_blocked: - logger.warn("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id)) + logger.warning("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id)) continue try: job_kwargs = schedule.get_job_kwargs() @@ -694,7 +694,7 @@ def handle_work_error(task_id, *args, **kwargs): instance = UnifiedJob.get_instance_by_type(each_task['type'], each_task['id']) if not instance: # Unknown task type - logger.warn("Unknown task type: {}".format(each_task['type'])) + logger.warning("Unknown task type: {}".format(each_task['type'])) continue except ObjectDoesNotExist: logger.warning('Missing {} `{}` in error callback.'.format(each_task['type'], each_task['id'])) @@ -741,7 +741,7 @@ def handle_success_and_failure_notifications(job_id): time.sleep(1) uj = UnifiedJob.objects.get(pk=job_id) - logger.warn(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") + logger.warning(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") @task(queue=get_local_queuename) diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index f2d617abb8..0a19c684fc 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -1947,7 +1947,7 @@ def test_notification_job_not_finished(logging_getLogger, mocker): with mocker.patch('awx.main.models.UnifiedJob.objects.get', uj): system.handle_success_and_failure_notifications(1) - assert logger.warn.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") + assert logger.warning.called_with(f"Failed to even try to send notifications for job '{uj}' due to job not being in finished state.") def test_notification_job_finished(mocker): diff --git a/awx/main/utils/reload.py b/awx/main/utils/reload.py index 6651fcf44d..a7c2a1ed99 100644 --- a/awx/main/utils/reload.py +++ 
b/awx/main/utils/reload.py @@ -40,5 +40,5 @@ def supervisor_service_command(command, service='*', communicate=True): def stop_local_services(communicate=True): - logger.warn('Stopping services on this node in response to user action') + logger.warning('Stopping services on this node in response to user action') supervisor_service_command(command='stop', communicate=communicate) diff --git a/awx/main/wsbroadcast.py b/awx/main/wsbroadcast.py index cc806e7f26..47006adc9d 100644 --- a/awx/main/wsbroadcast.py +++ b/awx/main/wsbroadcast.py @@ -92,7 +92,7 @@ class WebsocketTask: if attempt > 0: await asyncio.sleep(settings.BROADCAST_WEBSOCKET_RECONNECT_RETRY_RATE_SECONDS) except asyncio.CancelledError: - logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled") + logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled") raise uri = f"{self.protocol}://{self.remote_host}:{self.remote_port}/websocket/{self.endpoint}/" @@ -109,18 +109,18 @@ class WebsocketTask: except asyncio.CancelledError: # TODO: Check if connected and disconnect # Possibly use run_until_complete() if disconnect is async - logger.warn(f"Connection from {self.name} to {self.remote_host} cancelled.") + logger.warning(f"Connection from {self.name} to {self.remote_host} cancelled.") self.stats.record_connection_lost() raise except client_exceptions.ClientConnectorError as e: - logger.warn(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") + logger.warning(f"Connection from {self.name} to {self.remote_host} failed: '{e}'.") except asyncio.TimeoutError: - logger.warn(f"Connection from {self.name} to {self.remote_host} timed out.") + logger.warning(f"Connection from {self.name} to {self.remote_host} timed out.") except Exception as e: # Early on, this is our canary. I'm not sure what exceptions we can really encounter. 
- logger.warn(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") + logger.warning(f"Connection from {self.name} to {self.remote_host} failed for unknown reason: '{e}'.") else: - logger.warn(f"Connection from {self.name} to {self.remote_host} list.") + logger.warning(f"Connection from {self.name} to {self.remote_host} list.") self.stats.record_connection_lost() self.start(attempt=attempt + 1) @@ -146,7 +146,7 @@ class BroadcastWebsocketTask(WebsocketTask): logmsg = "Failed to decode broadcast message" if logger.isEnabledFor(logging.DEBUG): logmsg = "{} {}".format(logmsg, payload) - logger.warn(logmsg) + logger.warning(logmsg) continue (group, message) = unwrap_broadcast_msg(payload) if group == "metrics": @@ -185,9 +185,9 @@ class BroadcastWebsocketManager(object): new_remote_hosts.add(hostname) if deleted_remote_hosts: - logger.warn(f"Removing {deleted_remote_hosts} from websocket broadcast list") + logger.warning(f"Removing {deleted_remote_hosts} from websocket broadcast list") if new_remote_hosts: - logger.warn(f"Adding {new_remote_hosts} to websocket broadcast list") + logger.warning(f"Adding {new_remote_hosts} to websocket broadcast list") for h in deleted_remote_hosts: self.broadcast_tasks[h].cancel() diff --git a/awx/sso/backends.py b/awx/sso/backends.py index 20d6285c14..535cc9579c 100644 --- a/awx/sso/backends.py +++ b/awx/sso/backends.py @@ -179,7 +179,7 @@ def _get_or_set_enterprise_user(username, password, provider): created = True if created or user.is_in_enterprise_category(provider): return user - logger.warn("Enterprise user %s already defined in Tower." % username) + logger.warning("Enterprise user %s already defined in Tower." 
% username) class RADIUSBackend(BaseRADIUSBackend): @@ -257,7 +257,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider): if isinstance(value, (list, tuple)): value = value[0] if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None: - logger.warn( + logger.warning( "Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.", conf_key[5:], key, @@ -370,7 +370,7 @@ def on_populate_user(sender, **kwargs): if field_len > max_len: setattr(user, field, getattr(user, field)[:max_len]) force_user_update = True - logger.warn('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)) + logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)) # Update organization membership based on group memberships. org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {}) diff --git a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py index 0d48c54d87..3f37b41df3 100644 --- a/awx/sso/tests/functional/test_get_or_set_enterprise_user.py +++ b/awx/sso/tests/functional/test_get_or_set_enterprise_user.py @@ -11,7 +11,7 @@ def test_fetch_user_if_exist(existing_tacacsplus_user): with mock.patch('awx.sso.backends.logger') as mocked_logger: new_user = _get_or_set_enterprise_user("foo", "password", "tacacs+") mocked_logger.debug.assert_not_called() - mocked_logger.warn.assert_not_called() + mocked_logger.warning.assert_not_called() assert new_user == existing_tacacsplus_user @@ -33,5 +33,5 @@ def test_created_user_has_no_usable_password(): def test_non_enterprise_user_does_not_get_pass(existing_normal_user): with mock.patch('awx.sso.backends.logger') as mocked_logger: new_user = _get_or_set_enterprise_user("alice", "password", "tacacs+") - mocked_logger.warn.assert_called_once_with(u'Enterprise user alice already defined in 
Tower.') + mocked_logger.warning.assert_called_once_with(u'Enterprise user alice already defined in Tower.') assert new_user is None From 9b6fa5543362c425f7f16a97812850096588852c Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 31 Jan 2022 17:19:45 -0500 Subject: [PATCH 080/125] Deal with breaking tests for 3.1 - Django's PostgreSQL JSONField wraps values in a JsonAdapter, so deal with that when it happens. This goes away in Django 3.1. - Setting related *_id fields clears the actual relation field, so trying to fake objects for tests is a problem - Instance.objects.me() was inappropriately creating stub objects every time while running tests, but some of our tests now create real db objects. Ditch that logic and use a proper fixture where needed. - awxkit tox.ini was pinned at Python 3.8 --- awx/main/fields.py | 2 + awx/main/managers.py | 5 - awx/main/tests/conftest.py | 9 +- .../test_inventory_source_injectors.py | 2 +- awx/main/tests/functional/test_tasks.py | 8 +- awx/main/tests/unit/models/test_credential.py | 9 +- awx/main/tests/unit/test_tasks.py | 162 +++++++++--------- awxkit/tox.ini | 2 +- 8 files changed, 103 insertions(+), 96 deletions(-) diff --git a/awx/main/fields.py b/awx/main/fields.py index 1c470f11dd..8f71b53c2f 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -94,6 +94,8 @@ class JSONBField(upstream_JSONBField): def get_db_prep_value(self, value, connection, prepared=False): if connection.vendor == 'sqlite': # sqlite (which we use for tests) does not support jsonb; + if hasattr(value, 'adapted'): + value = value.adapted # FIXME: Django 3.0 uses JsonAdapter, removed in 3.1 return json.dumps(value, cls=DjangoJSONEncoder) return super(JSONBField, self).get_db_prep_value(value, connection, prepared) diff --git a/awx/main/managers.py b/awx/main/managers.py index 4d58f7bc55..8c7ae0901e 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -1,7 +1,6 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-import sys import logging import os from django.db import models @@ -104,10 +103,6 @@ class InstanceManager(models.Manager): def me(self): """Return the currently active instance.""" - # If we are running unit tests, return a stub record. - if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'): - return self.model(id=1, hostname=settings.CLUSTER_HOST_ID, uuid=UUID_DEFAULT) - node = self.filter(hostname=settings.CLUSTER_HOST_ID) if node.exists(): return node[0] diff --git a/awx/main/tests/conftest.py b/awx/main/tests/conftest.py index 0400f025d2..28565901b0 100644 --- a/awx/main/tests/conftest.py +++ b/awx/main/tests/conftest.py @@ -3,7 +3,7 @@ import pytest from unittest import mock from contextlib import contextmanager -from awx.main.models import Credential, UnifiedJob +from awx.main.models import Credential, UnifiedJob, Instance from awx.main.tests.factories import ( create_organization, create_job_template, @@ -212,3 +212,10 @@ def mock_get_event_queryset_no_job_created(): with mock.patch.object(UnifiedJob, 'get_event_queryset', lambda self: event_qs(self)) as _fixture: yield _fixture + + +@pytest.fixture +def mock_me(): + me_mock = mock.MagicMock(return_value=Instance(id=1, hostname=settings.CLUSTER_HOST_ID, uuid='00000000-0000-0000-0000-000000000000')) + with mock.patch.object(Instance.objects, 'me', me_mock): + yield diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index 0d4247feb3..01b7c3e2b0 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -181,7 +181,7 @@ def create_reference_data(source_dir, env, content): @pytest.mark.django_db @pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS) -def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory): +def test_inventory_update_injected_content(this_kind, inventory, 
fake_credential_factory, mock_me): ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True) ExecutionEnvironment.objects.create(name='Default Job EE', managed=False) diff --git a/awx/main/tests/functional/test_tasks.py b/awx/main/tests/functional/test_tasks.py index 951767d08e..14c48fa5ff 100644 --- a/awx/main/tests/functional/test_tasks.py +++ b/awx/main/tests/functional/test_tasks.py @@ -27,7 +27,7 @@ def test_no_worker_info_on_AWX_nodes(node_type): @pytest.mark.django_db class TestDependentInventoryUpdate: - def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file): + def test_dependent_inventory_updates_is_called(self, scm_inventory_source, scm_revision_file, mock_me): task = RunProjectUpdate() task.revision_path = scm_revision_file proj_update = scm_inventory_source.source_project.create_project_update() @@ -36,7 +36,7 @@ class TestDependentInventoryUpdate: task.post_run_hook(proj_update, 'successful') inv_update_mck.assert_called_once_with(proj_update, mock.ANY) - def test_no_unwanted_dependent_inventory_updates(self, project, scm_revision_file): + def test_no_unwanted_dependent_inventory_updates(self, project, scm_revision_file, mock_me): task = RunProjectUpdate() task.revision_path = scm_revision_file proj_update = project.create_project_update() @@ -45,7 +45,7 @@ class TestDependentInventoryUpdate: task.post_run_hook(proj_update, 'successful') assert not inv_update_mck.called - def test_dependent_inventory_updates(self, scm_inventory_source, default_instance_group): + def test_dependent_inventory_updates(self, scm_inventory_source, default_instance_group, mock_me): task = RunProjectUpdate() scm_inventory_source.scm_last_revision = '' proj_update = ProjectUpdate.objects.create(project=scm_inventory_source.source_project) @@ -57,7 +57,7 @@ class TestDependentInventoryUpdate: iu_run_mock.assert_called_once_with(inv_update.id) assert inv_update.source_project_update_id == proj_update.pk - def 
test_dependent_inventory_project_cancel(self, project, inventory, default_instance_group): + def test_dependent_inventory_project_cancel(self, project, inventory, default_instance_group, mock_me): """ Test that dependent inventory updates exhibit good behavior on cancel of the source project update diff --git a/awx/main/tests/unit/models/test_credential.py b/awx/main/tests/unit/models/test_credential.py index 082d7df7eb..0dc8daff33 100644 --- a/awx/main/tests/unit/models/test_credential.py +++ b/awx/main/tests/unit/models/test_credential.py @@ -1,12 +1,15 @@ # -*- coding: utf-8 -*- +import pytest + from awx.main.models import Credential, CredentialType +@pytest.mark.django_db def test_unique_hash_with_unicode(): - ct = CredentialType(name=u'Väult', kind='vault') - cred = Credential(id=4, name=u'Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={u'vault_id': u'🐉🐉🐉'}, credential_type_id=42) - assert cred.unique_hash(display=True) == u'Väult (id=🐉🐉🐉)' + ct = CredentialType.objects.create(name='Väult', kind='vault') + cred = Credential.objects.create(name='Iñtërnâtiônàlizætiøn', credential_type=ct, inputs={'vault_id': '🐉🐉🐉'}) + assert cred.unique_hash(display=True) == 'Väult (id=🐉🐉🐉)' def test_custom_cred_with_empty_encrypted_field(): diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 0a19c684fc..200a027e36 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -166,7 +166,7 @@ def test_safe_env_returns_new_copy(): @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -206,7 +206,7 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da 
@pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_project_domain_name(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -248,7 +248,7 @@ def test_openstack_client_config_generation_with_project_domain_name(mocker, sou @pytest.mark.parametrize("source,expected", [(None, True), (False, False), (True, True)]) -def test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_region(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -292,7 +292,7 @@ def test_openstack_client_config_generation_with_region(mocker, source, expected @pytest.mark.parametrize("source,expected", [(False, False), (True, True)]) -def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir): +def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected, private_data_dir, mock_me): update = jobs.RunInventoryUpdate() credential_type = CredentialType.defaults['openstack']() inputs = { @@ -352,7 +352,7 @@ class TestExtraVarSanitation(TestJobExecution): UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}' - def test_vars_unsafe_by_default(self, job, private_data_dir): + def test_vars_unsafe_by_default(self, job, private_data_dir, mock_me): job.created_by = User(pk=123, username='angry-spud') job.inventory = Inventory(pk=123, name='example-inv') @@ -390,7 +390,7 @@ class TestExtraVarSanitation(TestJobExecution): ]: assert not hasattr(extra_vars[safe], '__UNSAFE__') - def test_launchtime_vars_unsafe(self, job, 
private_data_dir): + def test_launchtime_vars_unsafe(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -401,7 +401,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == self.UNSAFE assert hasattr(extra_vars['msg'], '__UNSAFE__') - def test_nested_launchtime_vars_unsafe(self, job, private_data_dir): + def test_nested_launchtime_vars_unsafe(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': {'a': [self.UNSAFE]}}) task = jobs.RunJob() @@ -412,7 +412,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == {'a': [self.UNSAFE]} assert hasattr(extra_vars['msg']['a'][0], '__UNSAFE__') - def test_allowed_jt_extra_vars(self, job, private_data_dir): + def test_allowed_jt_extra_vars(self, job, private_data_dir, mock_me): job.job_template.extra_vars = job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -423,7 +423,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == self.UNSAFE assert not hasattr(extra_vars['msg'], '__UNSAFE__') - def test_nested_allowed_vars(self, job, private_data_dir): + def test_nested_allowed_vars(self, job, private_data_dir, mock_me): job.extra_vars = json.dumps({'msg': {'a': {'b': [self.UNSAFE]}}}) job.job_template.extra_vars = job.extra_vars task = jobs.RunJob() @@ -435,7 +435,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['msg'] == {'a': {'b': [self.UNSAFE]}} assert not hasattr(extra_vars['msg']['a']['b'][0], '__UNSAFE__') - def test_sensitive_values_dont_leak(self, job, private_data_dir): + def test_sensitive_values_dont_leak(self, job, private_data_dir, mock_me): # JT defines `msg=SENSITIVE`, the job *should not* be able to do # `other_var=SENSITIVE` job.job_template.extra_vars = json.dumps({'msg': self.UNSAFE}) @@ -452,7 +452,7 @@ class TestExtraVarSanitation(TestJobExecution): assert extra_vars['other_var'] == self.UNSAFE assert 
hasattr(extra_vars['other_var'], '__UNSAFE__') - def test_overwritten_jt_extra_vars(self, job, private_data_dir): + def test_overwritten_jt_extra_vars(self, job, private_data_dir, mock_me): job.job_template.extra_vars = json.dumps({'msg': 'SAFE'}) job.extra_vars = json.dumps({'msg': self.UNSAFE}) task = jobs.RunJob() @@ -466,7 +466,7 @@ class TestExtraVarSanitation(TestJobExecution): class TestGenericRun: - def test_generic_failure(self, patch_Job, execution_environment): + def test_generic_failure(self, patch_Job, execution_environment, mock_me): job = Job(status='running', inventory=Inventory(), project=Project(local_path='/projects/_23_foo')) job.websocket_emit_status = mock.Mock() job.execution_environment = execution_environment @@ -486,7 +486,7 @@ class TestGenericRun: assert update_model_call['status'] == 'error' assert update_model_call['emitted_events'] == 0 - def test_cancel_flag(self, job, update_model_wrapper, execution_environment): + def test_cancel_flag(self, job, update_model_wrapper, execution_environment, mock_me): job.status = 'running' job.cancel_flag = True job.websocket_emit_status = mock.Mock() @@ -506,7 +506,7 @@ class TestGenericRun: for c in [mock.call(1, status='running', start_args=''), mock.call(1, status='canceled')]: assert c in task.update_model.call_args_list - def test_event_count(self): + def test_event_count(self, mock_me): task = jobs.RunJob() task.runner_callback.dispatcher = mock.MagicMock() task.runner_callback.instance = Job() @@ -516,7 +516,7 @@ class TestGenericRun: [task.runner_callback.event_handler(event_data) for i in range(20)] assert 20 == task.runner_callback.event_ct - def test_finished_callback_eof(self): + def test_finished_callback_eof(self, mock_me): task = jobs.RunJob() task.runner_callback.dispatcher = mock.MagicMock() task.runner_callback.instance = Job(pk=1, id=1) @@ -524,7 +524,7 @@ class TestGenericRun: task.runner_callback.finished_callback(None) 
task.runner_callback.dispatcher.dispatch.assert_called_with({'event': 'EOF', 'final_counter': 17, 'job_id': 1, 'guid': None}) - def test_save_job_metadata(self, job, update_model_wrapper): + def test_save_job_metadata(self, job, update_model_wrapper, mock_me): class MockMe: pass @@ -542,7 +542,7 @@ class TestGenericRun: 1, job_args=json.dumps({'foo': 'bar'}), job_cwd='/foobar', job_env={'switch': 'blade', 'foot': 'ball', 'secret_key': 'redacted_value'} ) - def test_created_by_extra_vars(self): + def test_created_by_extra_vars(self, mock_me): job = Job(created_by=User(pk=123, username='angry-spud')) task = jobs.RunJob() @@ -557,7 +557,7 @@ class TestGenericRun: assert extra_vars['awx_user_id'] == 123 assert extra_vars['awx_user_name'] == "angry-spud" - def test_survey_extra_vars(self): + def test_survey_extra_vars(self, mock_me): job = Job() job.extra_vars = json.dumps({'super_secret': encrypt_value('CLASSIFIED', pk=None)}) job.survey_passwords = {'super_secret': '$encrypted$'} @@ -571,7 +571,7 @@ class TestGenericRun: private_data_dir, extra_vars, safe_dict = call_args assert extra_vars['super_secret'] == "CLASSIFIED" - def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment): + def test_awx_task_env(self, patch_Job, private_data_dir, execution_environment, mock_me): job = Job(project=Project(), inventory=Inventory()) job.execution_environment = execution_environment @@ -586,7 +586,7 @@ class TestGenericRun: @pytest.mark.django_db class TestAdhocRun(TestJobExecution): - def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper, mock_me): ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True) ExecutionEnvironment.objects.create(name='Default Job EE', managed=False) @@ -611,7 +611,7 @@ class TestAdhocRun(TestJobExecution): be wrapped in unsafe ''' ''' - def test_extra_vars_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + 
def test_extra_vars_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper, mock_me): adhoc_job.module_args = 'ls' adhoc_job.extra_vars = json.dumps({ 'foo': '{{ bar }}' @@ -630,7 +630,7 @@ class TestAdhocRun(TestJobExecution): assert extra_vars['foo'] == '{{ bar }}' ''' - def test_created_by_extra_vars(self): + def test_created_by_extra_vars(self, mock_me): adhoc_job = AdHocCommand(created_by=User(pk=123, username='angry-spud')) task = jobs.RunAdHocCommand() @@ -691,7 +691,7 @@ class TestJobCredentials(TestJobExecution): ] } - def test_username_jinja_usage(self, job, private_data_dir): + def test_username_jinja_usage(self, job, private_data_dir, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': '{{ ansible_ssh_pass }}'}) @@ -702,7 +702,7 @@ class TestJobCredentials(TestJobExecution): assert 'Jinja variables are not allowed' in str(e.value) @pytest.mark.parametrize("flag", ['become_username', 'become_method']) - def test_become_jinja_usage(self, job, private_data_dir, flag): + def test_become_jinja_usage(self, job, private_data_dir, flag, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'joe', flag: '{{ ansible_ssh_pass }}'}) @@ -713,7 +713,7 @@ class TestJobCredentials(TestJobExecution): assert 'Jinja variables are not allowed' in str(e.value) - def test_ssh_passwords(self, job, private_data_dir, field, password_name, expected_flag): + def test_ssh_passwords(self, job, private_data_dir, field, password_name, expected_flag, mock_me): task = jobs.RunJob() ssh = CredentialType.defaults['ssh']() credential = Credential(pk=1, credential_type=ssh, inputs={'username': 'bob', field: 'secret'}) @@ -730,7 +730,7 @@ class TestJobCredentials(TestJobExecution): if expected_flag: assert expected_flag in ' '.join(args) - def test_net_ssh_key_unlock(self, job): + def test_net_ssh_key_unlock(self, 
job, mock_me): task = jobs.RunJob() net = CredentialType.defaults['net']() credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'secret'}) @@ -743,7 +743,7 @@ class TestJobCredentials(TestJobExecution): assert 'secret' in expect_passwords.values() - def test_net_first_ssh_key_unlock_wins(self, job): + def test_net_first_ssh_key_unlock_wins(self, job, mock_me): task = jobs.RunJob() for i in range(3): net = CredentialType.defaults['net']() @@ -757,7 +757,7 @@ class TestJobCredentials(TestJobExecution): assert 'secret0' in expect_passwords.values() - def test_prefer_ssh_over_net_ssh_key_unlock(self, job): + def test_prefer_ssh_over_net_ssh_key_unlock(self, job, mock_me): task = jobs.RunJob() net = CredentialType.defaults['net']() net_credential = Credential(pk=1, credential_type=net, inputs={'ssh_key_unlock': 'net_secret'}) @@ -776,7 +776,7 @@ class TestJobCredentials(TestJobExecution): assert 'ssh_secret' in expect_passwords.values() - def test_vault_password(self, private_data_dir, job): + def test_vault_password(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'vault-me'}) @@ -791,7 +791,7 @@ class TestJobCredentials(TestJobExecution): assert expect_passwords['Vault password:\s*?$'] == 'vault-me' # noqa assert '--ask-vault-pass' in ' '.join(args) - def test_vault_password_ask(self, private_data_dir, job): + def test_vault_password_ask(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() credential = Credential(pk=1, credential_type=vault, inputs={'vault_password': 'ASK'}) @@ -806,7 +806,7 @@ class TestJobCredentials(TestJobExecution): assert expect_passwords['Vault password:\s*?$'] == 'provided-at-launch' # noqa assert '--ask-vault-pass' in ' '.join(args) - def test_multi_vault_password(self, private_data_dir, job): + def test_multi_vault_password(self, 
private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i, label in enumerate(['dev', 'prod', 'dotted.name']): @@ -829,7 +829,7 @@ class TestJobCredentials(TestJobExecution): assert '--vault-id prod@prompt' in ' '.join(args) assert '--vault-id dotted.name@prompt' in ' '.join(args) - def test_multi_vault_id_conflict(self, job): + def test_multi_vault_id_conflict(self, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i in range(2): @@ -842,7 +842,7 @@ class TestJobCredentials(TestJobExecution): assert 'multiple vault credentials were specified with --vault-id' in str(e.value) - def test_multi_vault_password_ask(self, private_data_dir, job): + def test_multi_vault_password_ask(self, private_data_dir, job, mock_me): task = jobs.RunJob() vault = CredentialType.defaults['vault']() for i, label in enumerate(['dev', 'prod']): @@ -863,7 +863,7 @@ class TestJobCredentials(TestJobExecution): assert '--vault-id prod@prompt' in ' '.join(args) @pytest.mark.parametrize("verify", (True, False)) - def test_k8s_credential(self, job, private_data_dir, verify): + def test_k8s_credential(self, job, private_data_dir, verify, mock_me): k8s = CredentialType.defaults['kubernetes_bearer_token']() inputs = { 'host': 'https://example.org/', @@ -898,7 +898,7 @@ class TestJobCredentials(TestJobExecution): assert safe_env['K8S_AUTH_API_KEY'] == HIDDEN_PASSWORD - def test_aws_cloud_credential(self, job, private_data_dir): + def test_aws_cloud_credential(self, job, private_data_dir, mock_me): aws = CredentialType.defaults['aws']() credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret'}) credential.inputs['password'] = encrypt_field(credential, 'password') @@ -913,7 +913,7 @@ class TestJobCredentials(TestJobExecution): assert 'AWS_SECURITY_TOKEN' not in env assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_aws_cloud_credential_with_sts_token(self, 
private_data_dir, job): + def test_aws_cloud_credential_with_sts_token(self, private_data_dir, job, mock_me): aws = CredentialType.defaults['aws']() credential = Credential(pk=1, credential_type=aws, inputs={'username': 'bob', 'password': 'secret', 'security_token': 'token'}) for key in ('password', 'security_token'): @@ -929,7 +929,7 @@ class TestJobCredentials(TestJobExecution): assert env['AWS_SECURITY_TOKEN'] == 'token' assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_gce_credentials(self, private_data_dir, job): + def test_gce_credentials(self, private_data_dir, job, mock_me): gce = CredentialType.defaults['gce']() credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) credential.inputs['ssh_key_data'] = encrypt_field(credential, 'ssh_key_data') @@ -946,7 +946,7 @@ class TestJobCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - def test_azure_rm_with_tenant(self, private_data_dir, job): + def test_azure_rm_with_tenant(self, private_data_dir, job, mock_me): azure = CredentialType.defaults['azure_rm']() credential = Credential( pk=1, credential_type=azure, inputs={'client': 'some-client', 'secret': 'some-secret', 'tenant': 'some-tenant', 'subscription': 'some-subscription'} @@ -964,7 +964,7 @@ class TestJobCredentials(TestJobExecution): assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription' assert safe_env['AZURE_SECRET'] == HIDDEN_PASSWORD - def test_azure_rm_with_password(self, private_data_dir, job): + def test_azure_rm_with_password(self, private_data_dir, job, mock_me): azure = CredentialType.defaults['azure_rm']() credential = Credential( pk=1, credential_type=azure, inputs={'subscription': 'some-subscription', 'username': 'bob', 'password': 'secret', 'cloud_environment': 'foobar'} @@ -982,7 +982,7 @@ class TestJobCredentials(TestJobExecution): assert 
env['AZURE_CLOUD_ENVIRONMENT'] == 'foobar' assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_vmware_credentials(self, private_data_dir, job): + def test_vmware_credentials(self, private_data_dir, job, mock_me): vmware = CredentialType.defaults['vmware']() credential = Credential(pk=1, credential_type=vmware, inputs={'username': 'bob', 'password': 'secret', 'host': 'https://example.org'}) credential.inputs['password'] = encrypt_field(credential, 'password') @@ -997,7 +997,7 @@ class TestJobCredentials(TestJobExecution): assert env['VMWARE_HOST'] == 'https://example.org' assert safe_env['VMWARE_PASSWORD'] == HIDDEN_PASSWORD - def test_openstack_credentials(self, private_data_dir, job): + def test_openstack_credentials(self, private_data_dir, job, mock_me): task = jobs.RunJob() task.instance = job openstack = CredentialType.defaults['openstack']() @@ -1028,7 +1028,7 @@ class TestJobCredentials(TestJobExecution): ) @pytest.mark.parametrize("ca_file", [None, '/path/to/some/file']) - def test_rhv_credentials(self, private_data_dir, job, ca_file): + def test_rhv_credentials(self, private_data_dir, job, ca_file, mock_me): rhv = CredentialType.defaults['rhv']() inputs = { 'host': 'some-ovirt-host.example.org', @@ -1065,7 +1065,7 @@ class TestJobCredentials(TestJobExecution): [None, '0'], ], ) - def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir): + def test_net_credentials(self, authorize, expected_authorize, job, private_data_dir, mock_me): task = jobs.RunJob() task.instance = job net = CredentialType.defaults['net']() @@ -1090,7 +1090,7 @@ class TestJobCredentials(TestJobExecution): assert open(env['ANSIBLE_NET_SSH_KEYFILE'], 'r').read() == self.EXAMPLE_PRIVATE_KEY assert safe_env['ANSIBLE_NET_PASSWORD'] == HIDDEN_PASSWORD - def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir): + def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir, mock_me): some_cloud = 
CredentialType( kind='cloud', name='SomeCloud', @@ -1103,7 +1103,7 @@ class TestJobCredentials(TestJobExecution): with pytest.raises(jinja2.exceptions.UndefinedError): credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir) - def test_custom_environment_injectors(self, private_data_dir): + def test_custom_environment_injectors(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1118,7 +1118,7 @@ class TestJobCredentials(TestJobExecution): assert env['MY_CLOUD_API_TOKEN'] == 'ABC123' - def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir): + def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1133,7 +1133,7 @@ class TestJobCredentials(TestJobExecution): assert env['TURBO_BUTTON'] == str(True) - def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job): + def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job, mock_me): task = jobs.RunJob() task.instance = job some_cloud = CredentialType( @@ -1150,7 +1150,7 @@ class TestJobCredentials(TestJobExecution): assert env['JOB_ID'] == str(job.pk) - def test_custom_environment_injectors_with_secret_field(self, private_data_dir): + def test_custom_environment_injectors_with_secret_field(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1169,7 +1169,7 @@ class TestJobCredentials(TestJobExecution): assert 'SUPER-SECRET-123' not in safe_env.values() assert safe_env['MY_CLOUD_PRIVATE_VAR'] == HIDDEN_PASSWORD - def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job): + def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1188,7 +1188,7 @@ class 
TestJobCredentials(TestJobExecution): assert extra_vars["api_token"] == "ABC123" assert hasattr(extra_vars["api_token"], '__UNSAFE__') - def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir): + def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1207,7 +1207,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["turbo_button"] == "True" return ['successful', 0] - def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir): + def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir, mock_me): task = jobs.RunJob() some_cloud = CredentialType( kind='cloud', @@ -1225,7 +1225,7 @@ class TestJobCredentials(TestJobExecution): assert extra_vars["turbo_button"] == "FAST!" - def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir): + def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir, mock_me): """ extra_vars that contain secret field values should be censored in the DB """ @@ -1247,7 +1247,7 @@ class TestJobCredentials(TestJobExecution): extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["password"] == "SUPER-SECRET-123" - def test_custom_environment_injectors_with_file(self, private_data_dir): + def test_custom_environment_injectors_with_file(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1263,7 +1263,7 @@ class TestJobCredentials(TestJobExecution): path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) assert open(path, 'r').read() == '[mycloud]\nABC123' - def test_custom_environment_injectors_with_unicode_content(self, private_data_dir): + def test_custom_environment_injectors_with_unicode_content(self, private_data_dir, mock_me): value = 'Iñtërnâtiônàlizætiøn' some_cloud = 
CredentialType( kind='cloud', @@ -1283,7 +1283,7 @@ class TestJobCredentials(TestJobExecution): path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) assert open(path, 'r').read() == value - def test_custom_environment_injectors_with_files(self, private_data_dir): + def test_custom_environment_injectors_with_files(self, private_data_dir, mock_me): some_cloud = CredentialType( kind='cloud', name='SomeCloud', @@ -1304,7 +1304,7 @@ class TestJobCredentials(TestJobExecution): assert open(cert_path, 'r').read() == '[mycert]\nCERT123' assert open(key_path, 'r').read() == '[mykey]\nKEY123' - def test_multi_cloud(self, private_data_dir): + def test_multi_cloud(self, private_data_dir, mock_me): gce = CredentialType.defaults['gce']() gce_credential = Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) gce_credential.inputs['ssh_key_data'] = encrypt_field(gce_credential, 'ssh_key_data') @@ -1332,7 +1332,7 @@ class TestJobCredentials(TestJobExecution): assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_awx_task_env(self, settings, private_data_dir, job): + def test_awx_task_env(self, settings, private_data_dir, job, mock_me): settings.AWX_TASK_ENV = {'FOO': 'BAR'} task = jobs.RunJob() task.instance = job @@ -1359,7 +1359,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): ], } - def test_galaxy_credentials_ignore_certs(self, private_data_dir, project_update, ignore): + def test_galaxy_credentials_ignore_certs(self, private_data_dir, project_update, ignore, mock_me): settings.GALAXY_IGNORE_CERTS = ignore task = jobs.RunProjectUpdate() task.instance = project_update @@ -1369,7 +1369,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): else: assert 'ANSIBLE_GALAXY_IGNORE' not in env - def test_galaxy_credentials_empty(self, private_data_dir, project_update): + def test_galaxy_credentials_empty(self, private_data_dir, project_update, mock_me): class 
RunProjectUpdate(jobs.RunProjectUpdate): __vars__ = {} @@ -1388,7 +1388,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): for k in env: assert not k.startswith('ANSIBLE_GALAXY_SERVER') - def test_single_public_galaxy(self, private_data_dir, project_update): + def test_single_public_galaxy(self, private_data_dir, project_update, mock_me): class RunProjectUpdate(jobs.RunProjectUpdate): __vars__ = {} @@ -1418,7 +1418,7 @@ class TestProjectUpdateGalaxyCredentials(TestJobExecution): ('ANSIBLE_GALAXY_SERVER_SERVER0_URL', 'https://galaxy.ansible.com/'), ] - def test_multiple_galaxy_endpoints(self, private_data_dir, project_update): + def test_multiple_galaxy_endpoints(self, private_data_dir, project_update, mock_me): credential_type = CredentialType.defaults['galaxy_api_token']() public_galaxy = Credential( pk=1, @@ -1479,7 +1479,7 @@ class TestProjectUpdateCredentials(TestJobExecution): ], } - def test_username_and_password_auth(self, project_update, scm_type): + def test_username_and_password_auth(self, project_update, scm_type, mock_me): task = jobs.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() project_update.scm_type = scm_type @@ -1493,7 +1493,7 @@ class TestProjectUpdateCredentials(TestJobExecution): assert 'bob' in expect_passwords.values() assert 'secret' in expect_passwords.values() - def test_ssh_key_auth(self, project_update, scm_type): + def test_ssh_key_auth(self, project_update, scm_type, mock_me): task = jobs.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() project_update.scm_type = scm_type @@ -1505,7 +1505,7 @@ class TestProjectUpdateCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) assert 'bob' in expect_passwords.values() - def test_awx_task_env(self, project_update, settings, private_data_dir, scm_type, execution_environment): + def test_awx_task_env(self, project_update, settings, private_data_dir, scm_type, execution_environment, mock_me): 
project_update.execution_environment = execution_environment settings.AWX_TASK_ENV = {'FOO': 'BAR'} task = jobs.RunProjectUpdate() @@ -1522,7 +1522,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): def inventory_update(self, execution_environment): return InventoryUpdate(pk=1, execution_environment=execution_environment, inventory_source=InventorySource(pk=1, inventory=Inventory(pk=1))) - def test_source_without_credential(self, mocker, inventory_update, private_data_dir): + def test_source_without_credential(self, mocker, inventory_update, private_data_dir, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update inventory_update.source = 'ec2' @@ -1535,7 +1535,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert 'AWS_ACCESS_KEY_ID' not in env assert 'AWS_SECRET_ACCESS_KEY' not in env - def test_ec2_source(self, private_data_dir, inventory_update, mocker): + def test_ec2_source(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update aws = CredentialType.defaults['aws']() @@ -1559,7 +1559,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AWS_SECRET_ACCESS_KEY'] == HIDDEN_PASSWORD - def test_vmware_source(self, inventory_update, private_data_dir, mocker): + def test_vmware_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update vmware = CredentialType.defaults['vmware']() @@ -1587,7 +1587,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): env["VMWARE_HOST"] == "https://example.org", env["VMWARE_VALIDATE_CERTS"] == "False", - def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker): + def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update azure_rm = CredentialType.defaults['azure_rm']() @@ -1623,7 +1623,7 
@@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AZURE_SECRET'] == HIDDEN_PASSWORD - def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker): + def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update azure_rm = CredentialType.defaults['azure_rm']() @@ -1652,7 +1652,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AZURE_PASSWORD'] == HIDDEN_PASSWORD - def test_gce_source(self, inventory_update, private_data_dir, mocker): + def test_gce_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update gce = CredentialType.defaults['gce']() @@ -1682,7 +1682,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert json_data['client_email'] == 'bob' assert json_data['project_id'] == 'some-project' - def test_openstack_source(self, inventory_update, private_data_dir, mocker): + def test_openstack_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update openstack = CredentialType.defaults['openstack']() @@ -1722,7 +1722,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): in shade_config ) - def test_satellite6_source(self, inventory_update, private_data_dir, mocker): + def test_satellite6_source(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update satellite6 = CredentialType.defaults['satellite6']() @@ -1745,7 +1745,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env["FOREMAN_PASSWORD"] == "secret" assert safe_env["FOREMAN_PASSWORD"] == HIDDEN_PASSWORD - def test_insights_source(self, inventory_update, private_data_dir, mocker): + def test_insights_source(self, inventory_update, private_data_dir, mocker, mock_me): task = 
jobs.RunInventoryUpdate() task.instance = inventory_update insights = CredentialType.defaults['insights']() @@ -1774,7 +1774,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['INSIGHTS_PASSWORD'] == HIDDEN_PASSWORD @pytest.mark.parametrize('verify', [True, False]) - def test_tower_source(self, verify, inventory_update, private_data_dir, mocker): + def test_tower_source(self, verify, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update tower = CredentialType.defaults['controller']() @@ -1802,7 +1802,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['CONTROLLER_VERIFY_SSL'] == 'False' assert safe_env['CONTROLLER_PASSWORD'] == HIDDEN_PASSWORD - def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker): + def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update tower = CredentialType.defaults['controller']() @@ -1830,7 +1830,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['TOWER_VERIFY_SSL'] == 'False' - def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker): + def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker, mock_me): task = jobs.RunInventoryUpdate() task.instance = inventory_update gce = CredentialType.defaults['gce']() @@ -1869,7 +1869,7 @@ def test_fcntl_ioerror(): @mock.patch('os.open') @mock.patch('logging.getLogger') -def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): +def test_acquire_lock_open_fail_logged(logging_getLogger, os_open, mock_me): err = OSError() err.errno = 3 err.strerror = 'dummy message' @@ -1893,7 +1893,7 @@ def test_aquire_lock_open_fail_logged(logging_getLogger, os_open): @mock.patch('os.close') @mock.patch('logging.getLogger') @mock.patch('fcntl.lockf') -def 
test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_close, os_open): +def test_acquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_close, os_open, mock_me): err = IOError() err.errno = 3 err.strerror = 'dummy message' @@ -1913,7 +1913,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_ with pytest.raises(IOError): ProjectUpdate.acquire_lock(instance) os_close.assert_called_with(3) - assert logger.err.called_with("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) + assert logger.err.called_with("I/O error({0}) while trying to acquire lock on file [{1}]: {2}".format(3, 'this_file_does_not_exist', 'dummy message')) @pytest.mark.parametrize('injector_cls', [cls for cls in ManagedCredentialType.registry.values() if cls.injectors]) @@ -1958,7 +1958,7 @@ def test_notification_job_finished(mocker): uj.send_notification_templates.assert_called() -def test_job_run_no_ee(): +def test_job_run_no_ee(mock_me): org = Organization(pk=1) proj = Project(pk=1, organization=org) job = Job(project=proj, organization=org, inventory=Inventory(pk=1)) @@ -1977,7 +1977,7 @@ def test_job_run_no_ee(): assert 'Job could not start because no Execution Environment could be found' in str(e.value) -def test_project_update_no_ee(): +def test_project_update_no_ee(mock_me): org = Organization(pk=1) proj = Project(pk=1, organization=org) project_update = ProjectUpdate(pk=1, project=proj, scm_type='git') diff --git a/awxkit/tox.ini b/awxkit/tox.ini index 73c9493e1d..3e63d73673 100644 --- a/awxkit/tox.ini +++ b/awxkit/tox.ini @@ -8,7 +8,7 @@ skip_missing_interpreters = true # skipsdist = true [testenv] -basepython = python3.8 +basepython = python3.9 passenv = TRAVIS TRAVIS_JOB_ID TRAVIS_BRANCH setenv = PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:. 
From faa12880a9814155eac55acf0701d70d0f455f30 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 31 Jan 2022 18:12:20 -0500 Subject: [PATCH 081/125] Squash a few deprecation warnings - inspect.getargspec() -> inspect.getfullargspec() - register pytest.mark.fixture_args - replace use of DRF's deprecated NullBooleanField - fix some usage of naive datetimes in the tests - fix some strings with backslashes that ought to be raw strings --- awx/api/fields.py | 12 ++++++++---- awx/api/filters.py | 4 +--- awx/api/serializers.py | 2 +- awx/conf/fields.py | 14 +++++++------- awx/conf/migrations/_ldap_group_type.py | 2 +- awx/main/tasks/__init__.py | 1 + .../functional/api/test_unified_jobs_stdout.py | 13 +++++++------ awx/main/tests/unit/test_tasks.py | 18 +++++++++--------- awx/main/validators.py | 2 +- awx/settings/defaults.py | 3 ++- awx/sso/fields.py | 16 ++++++++-------- awx/ui/fields.py | 2 +- pytest.ini | 1 + 13 files changed, 48 insertions(+), 42 deletions(-) diff --git a/awx/api/fields.py b/awx/api/fields.py index 98c1bd8eac..c84b6327f9 100644 --- a/awx/api/fields.py +++ b/awx/api/fields.py @@ -28,13 +28,17 @@ class NullFieldMixin(object): return (is_empty_value, data) -class BooleanNullField(NullFieldMixin, serializers.NullBooleanField): +class BooleanNullField(NullFieldMixin, serializers.BooleanField): """ Custom boolean field that allows null and empty string as False values. 
""" + def __init__(self, **kwargs): + kwargs['allow_null'] = True + super().__init__(**kwargs) + def to_internal_value(self, data): - return bool(super(BooleanNullField, self).to_internal_value(data)) + return bool(super().to_internal_value(data)) class CharNullField(NullFieldMixin, serializers.CharField): @@ -47,7 +51,7 @@ class CharNullField(NullFieldMixin, serializers.CharField): super(CharNullField, self).__init__(**kwargs) def to_internal_value(self, data): - return super(CharNullField, self).to_internal_value(data or u'') + return super(CharNullField, self).to_internal_value(data or '') class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): @@ -60,7 +64,7 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): super(ChoiceNullField, self).__init__(**kwargs) def to_internal_value(self, data): - return super(ChoiceNullField, self).to_internal_value(data or u'') + return super(ChoiceNullField, self).to_internal_value(data or '') class VerbatimField(serializers.Field): diff --git a/awx/api/filters.py b/awx/api/filters.py index 2856e58f76..5d2af58f72 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -192,9 +192,7 @@ class FieldLookupBackend(BaseFilterBackend): return int(value) def value_to_python_for_field(self, field, value): - if isinstance(field, models.NullBooleanField): - return to_python_boolean(value, allow_none=True) - elif isinstance(field, models.BooleanField): + if isinstance(field, models.BooleanField): return to_python_boolean(value) elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)): try: diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 4cd4f01fb6..a63e651840 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3628,7 +3628,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer): job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) limit = serializers.CharField(allow_blank=True, allow_null=True, 
required=False, default=None) skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None) - diff_mode = serializers.NullBooleanField(required=False, default=None) + diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None) verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES) exclude_errors = () diff --git a/awx/conf/fields.py b/awx/conf/fields.py index 889f71ca23..7802b2a085 100644 --- a/awx/conf/fields.py +++ b/awx/conf/fields.py @@ -10,7 +10,7 @@ from django.core.validators import URLValidator, _lazy_re_compile from django.utils.translation import gettext_lazy as _ # Django REST Framework -from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa +from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField # noqa from rest_framework.serializers import PrimaryKeyRelatedField # noqa # AWX @@ -65,11 +65,11 @@ class StringListBooleanField(ListField): try: if isinstance(value, (list, tuple)): return super(StringListBooleanField, self).to_representation(value) - elif value in NullBooleanField.TRUE_VALUES: + elif value in BooleanField.TRUE_VALUES: return True - elif value in NullBooleanField.FALSE_VALUES: + elif value in BooleanField.FALSE_VALUES: return False - elif value in NullBooleanField.NULL_VALUES: + elif value in BooleanField.NULL_VALUES: return None elif isinstance(value, str): return self.child.to_representation(value) @@ -82,11 +82,11 @@ class StringListBooleanField(ListField): try: if isinstance(data, (list, tuple)): return super(StringListBooleanField, self).to_internal_value(data) - elif data in NullBooleanField.TRUE_VALUES: + elif data in BooleanField.TRUE_VALUES: return True - elif data in NullBooleanField.FALSE_VALUES: + elif data in BooleanField.FALSE_VALUES: 
return False - elif data in NullBooleanField.NULL_VALUES: + elif data in BooleanField.NULL_VALUES: return None elif isinstance(data, str): return self.child.run_validation(data) diff --git a/awx/conf/migrations/_ldap_group_type.py b/awx/conf/migrations/_ldap_group_type.py index e8de5ca4aa..09caa2d28b 100644 --- a/awx/conf/migrations/_ldap_group_type.py +++ b/awx/conf/migrations/_ldap_group_type.py @@ -17,7 +17,7 @@ def fill_ldap_group_type_params(apps, schema_editor): else: entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now()) - init_attrs = set(inspect.getargspec(group_type.__init__).args[1:]) + init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:]) for k in list(group_type_params.keys()): if k not in init_attrs: del group_type_params[k] diff --git a/awx/main/tasks/__init__.py b/awx/main/tasks/__init__.py index e69de29bb2..517df4a285 100644 --- a/awx/main/tasks/__init__.py +++ b/awx/main/tasks/__init__.py @@ -0,0 +1 @@ +from . 
import jobs, receptor, system # noqa diff --git a/awx/main/tests/functional/api/test_unified_jobs_stdout.py b/awx/main/tests/functional/api/test_unified_jobs_stdout.py index acfc7a0459..dad55c5ba0 100644 --- a/awx/main/tests/functional/api/test_unified_jobs_stdout.py +++ b/awx/main/tests/functional/api/test_unified_jobs_stdout.py @@ -3,11 +3,12 @@ import base64 import json import re -from datetime import datetime +from unittest import mock from django.conf import settings from django.utils.encoding import smart_str -from unittest import mock +from django.utils.timezone import now as tz_now + import pytest from awx.api.versioning import reverse @@ -146,7 +147,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, ge @pytest.mark.django_db def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = SystemJob(created=created) job.save() for i in range(3): @@ -158,7 +159,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin): @pytest.mark.django_db def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = SystemJob(created=created) job.save() total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 @@ -185,7 +186,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin): @pytest.mark.parametrize('fmt', ['txt', 'ansi']) @mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin): - created = datetime.utcnow() + created = tz_now() job = Parent(created=created) job.save() total_bytes = settings.STDOUT_MAX_BYTES_DISPLAY + 1 @@ -267,7 +268,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, v @pytest.mark.django_db def 
test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin): - created = datetime.utcnow() + created = tz_now() job = Job(created=created) job.save() for i in range(3): diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 200a027e36..69a7f03c33 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -788,7 +788,7 @@ class TestJobCredentials(TestJobExecution): password_prompts = task.get_password_prompts(passwords) expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) - assert expect_passwords['Vault password:\s*?$'] == 'vault-me' # noqa + assert expect_passwords[r'Vault password:\s*?$'] == 'vault-me' # noqa assert '--ask-vault-pass' in ' '.join(args) def test_vault_password_ask(self, private_data_dir, job, mock_me): @@ -803,7 +803,7 @@ class TestJobCredentials(TestJobExecution): password_prompts = task.get_password_prompts(passwords) expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) - assert expect_passwords['Vault password:\s*?$'] == 'provided-at-launch' # noqa + assert expect_passwords[r'Vault password:\s*?$'] == 'provided-at-launch' # noqa assert '--ask-vault-pass' in ' '.join(args) def test_multi_vault_password(self, private_data_dir, job, mock_me): @@ -820,10 +820,10 @@ class TestJobCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k) - assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'pass@prod' # noqa - assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'pass@dev' # noqa - assert vault_passwords['Vault password \(dotted.name\):\\s*?$'] == 'pass@dotted.name' # noqa - assert vault_passwords['Vault password:\\s*?$'] == '' # noqa + assert vault_passwords[r'Vault password \(prod\):\s*?$'] == 'pass@prod' # noqa + assert vault_passwords[r'Vault password 
\(dev\):\s*?$'] == 'pass@dev' # noqa + assert vault_passwords[r'Vault password \(dotted.name\):\s*?$'] == 'pass@dotted.name' # noqa + assert vault_passwords[r'Vault password:\s*?$'] == '' # noqa assert '--ask-vault-pass' not in ' '.join(args) assert '--vault-id dev@prompt' in ' '.join(args) assert '--vault-id prod@prompt' in ' '.join(args) @@ -855,9 +855,9 @@ class TestJobCredentials(TestJobExecution): expect_passwords = task.create_expect_passwords_data_struct(password_prompts, passwords) vault_passwords = dict((k, v) for k, v in expect_passwords.items() if 'Vault' in k) - assert vault_passwords['Vault password \(prod\):\\s*?$'] == 'provided-at-launch@prod' # noqa - assert vault_passwords['Vault password \(dev\):\\s*?$'] == 'provided-at-launch@dev' # noqa - assert vault_passwords['Vault password:\\s*?$'] == '' # noqa + assert vault_passwords[r'Vault password \(prod\):\s*?$'] == 'provided-at-launch@prod' # noqa + assert vault_passwords[r'Vault password \(dev\):\s*?$'] == 'provided-at-launch@dev' # noqa + assert vault_passwords[r'Vault password:\s*?$'] == '' # noqa assert '--ask-vault-pass' not in ' '.join(args) assert '--vault-id dev@prompt' in ' '.join(args) assert '--vault-id prod@prompt' in ' '.join(args) diff --git a/awx/main/validators.py b/awx/main/validators.py index 4cd0e25459..751d38060b 100644 --- a/awx/main/validators.py +++ b/awx/main/validators.py @@ -198,7 +198,7 @@ def vars_validate_or_raise(vars_str): def validate_container_image_name(value): - """ + r""" from https://github.com/distribution/distribution/blob/af8ac809336c2316c81b08605d92d94f8670ad15/reference/reference.go#L4 Grammar diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index fcd422a3dd..75a49862a1 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -152,7 +152,8 @@ SITE_ID = 1 # Make this unique, and don't share it with anybody. 
if os.path.exists('/etc/tower/SECRET_KEY'): - SECRET_KEY = open('/etc/tower/SECRET_KEY', 'rb').read().strip() + with open('/etc/tower/SECRET_KEY', 'rb') as f: + SECRET_KEY = f.read().strip() else: SECRET_KEY = base64.encodebytes(os.urandom(32)).decode().rstrip() diff --git a/awx/sso/fields.py b/awx/sso/fields.py index e2c87d97f2..9ad016f594 100644 --- a/awx/sso/fields.py +++ b/awx/sso/fields.py @@ -457,7 +457,7 @@ class LDAPGroupTypeField(fields.ChoiceField, DependsOnMixin): params = self.get_depends_on() or {} params_sanitized = dict() - cls_args = inspect.getargspec(cls.__init__).args[1:] + cls_args = inspect.getfullargspec(cls.__init__).args[1:] if cls_args: if not isinstance(params, dict): @@ -488,7 +488,7 @@ class LDAPGroupTypeParamsField(fields.DictField, DependsOnMixin): # Fail safe return {} - invalid_keys = set(value.keys()) - set(inspect.getargspec(group_type_cls.__init__).args[1:]) + invalid_keys = set(value.keys()) - set(inspect.getfullargspec(group_type_cls.__init__).args[1:]) if invalid_keys: invalid_keys = sorted(list(invalid_keys)) keys_display = json.dumps(invalid_keys).lstrip('[').rstrip(']') @@ -583,11 +583,11 @@ class SocialMapField(fields.ListField): def to_representation(self, value): if isinstance(value, (list, tuple)): return super(SocialMapField, self).to_representation(value) - elif value in fields.NullBooleanField.TRUE_VALUES: + elif value in fields.BooleanField.TRUE_VALUES: return True - elif value in fields.NullBooleanField.FALSE_VALUES: + elif value in fields.BooleanField.FALSE_VALUES: return False - elif value in fields.NullBooleanField.NULL_VALUES: + elif value in fields.BooleanField.NULL_VALUES: return None elif isinstance(value, (str, type(re.compile('')))): return self.child.to_representation(value) @@ -597,11 +597,11 @@ class SocialMapField(fields.ListField): def to_internal_value(self, data): if isinstance(data, (list, tuple)): return super(SocialMapField, self).to_internal_value(data) - elif data in 
fields.NullBooleanField.TRUE_VALUES: + elif data in fields.BooleanField.TRUE_VALUES: return True - elif data in fields.NullBooleanField.FALSE_VALUES: + elif data in fields.BooleanField.FALSE_VALUES: return False - elif data in fields.NullBooleanField.NULL_VALUES: + elif data in fields.BooleanField.NULL_VALUES: return None elif isinstance(data, str): return self.child.run_validation(data) diff --git a/awx/ui/fields.py b/awx/ui/fields.py index 2200de3417..37089c0265 100644 --- a/awx/ui/fields.py +++ b/awx/ui/fields.py @@ -16,7 +16,7 @@ from awx.conf import fields class PendoTrackingStateField(fields.ChoiceField): def to_internal_value(self, data): # Any false/null values get converted to 'off'. - if data in fields.NullBooleanField.FALSE_VALUES or data in fields.NullBooleanField.NULL_VALUES: + if data in fields.BooleanField.FALSE_VALUES or data in fields.BooleanField.NULL_VALUES: return 'off' return super(PendoTrackingStateField, self).to_internal_value(data) diff --git a/pytest.ini b/pytest.ini index d5d7273433..d4ffd2db9b 100644 --- a/pytest.ini +++ b/pytest.ini @@ -12,4 +12,5 @@ markers = job_permissions: activity_stream_access: job_runtime_vars: + fixture_args: junit_family=xunit2 From 65d17fb316156e9374566a474db2f00cfef6e1c8 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 2 Feb 2022 13:18:47 -0500 Subject: [PATCH 082/125] Upgrade to Django 3.1 - upgrades - Django 3.1.14 - djangorestframework 3.13.1 (from 3.12.1) - djangorestframework-yaml 2.0.0 (from 1.0.3) - django-polymorphic 3.1.0 (from 2.1.2) - social-auth-core 4.2.0 (from 3.3.1) - social-auth-app-django 5.0.0 (from 3.1.0) - django-auth-ldap 4.0.0 (from 2.1.0) - django-crum 0.7.9 (from 0.7.5) - django-jsonfield is removed --- docs/licenses/django-jsonfield.txt | 23 ------- ...{jaraco.classes.txt => jaraco-classes.txt} | 0 ...collections.txt => jaraco-collections.txt} | 0 ...aco.functools.txt => jaraco-functools.txt} | 0 ...{jaraco.logging.txt => jaraco-logging.txt} | 0 .../{jaraco.stream.txt => 
jaraco-stream.txt} | 0 .../{jaraco.text.txt => jaraco-text.txt} | 0 .../{ruamel.yaml.txt => ruamel-yaml.txt} | 0 .../{tacacs_plus.txt => tacacs-plus.txt} | 0 ...{zope.interface.txt => zope-interface.txt} | 0 requirements/requirements.in | 9 ++- requirements/requirements.txt | 65 +++++++++---------- 12 files changed, 34 insertions(+), 63 deletions(-) delete mode 100644 docs/licenses/django-jsonfield.txt rename docs/licenses/{jaraco.classes.txt => jaraco-classes.txt} (100%) rename docs/licenses/{jaraco.collections.txt => jaraco-collections.txt} (100%) rename docs/licenses/{jaraco.functools.txt => jaraco-functools.txt} (100%) rename docs/licenses/{jaraco.logging.txt => jaraco-logging.txt} (100%) rename docs/licenses/{jaraco.stream.txt => jaraco-stream.txt} (100%) rename docs/licenses/{jaraco.text.txt => jaraco-text.txt} (100%) rename docs/licenses/{ruamel.yaml.txt => ruamel-yaml.txt} (100%) rename docs/licenses/{tacacs_plus.txt => tacacs-plus.txt} (100%) rename docs/licenses/{zope.interface.txt => zope-interface.txt} (100%) diff --git a/docs/licenses/django-jsonfield.txt b/docs/licenses/django-jsonfield.txt deleted file mode 100644 index d869f756e8..0000000000 --- a/docs/licenses/django-jsonfield.txt +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) 2012, Matthew Schinckel. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * The names of its contributors may not be used to endorse or promote products - derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL MATTHEW SCHINCKEL BE LIABLE FOR ANY DIRECT, -INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/licenses/jaraco.classes.txt b/docs/licenses/jaraco-classes.txt similarity index 100% rename from docs/licenses/jaraco.classes.txt rename to docs/licenses/jaraco-classes.txt diff --git a/docs/licenses/jaraco.collections.txt b/docs/licenses/jaraco-collections.txt similarity index 100% rename from docs/licenses/jaraco.collections.txt rename to docs/licenses/jaraco-collections.txt diff --git a/docs/licenses/jaraco.functools.txt b/docs/licenses/jaraco-functools.txt similarity index 100% rename from docs/licenses/jaraco.functools.txt rename to docs/licenses/jaraco-functools.txt diff --git a/docs/licenses/jaraco.logging.txt b/docs/licenses/jaraco-logging.txt similarity index 100% rename from docs/licenses/jaraco.logging.txt rename to docs/licenses/jaraco-logging.txt diff --git a/docs/licenses/jaraco.stream.txt b/docs/licenses/jaraco-stream.txt similarity index 100% rename from docs/licenses/jaraco.stream.txt rename to docs/licenses/jaraco-stream.txt diff --git a/docs/licenses/jaraco.text.txt b/docs/licenses/jaraco-text.txt similarity index 100% rename from docs/licenses/jaraco.text.txt rename to docs/licenses/jaraco-text.txt diff --git a/docs/licenses/ruamel.yaml.txt b/docs/licenses/ruamel-yaml.txt similarity index 100% 
rename from docs/licenses/ruamel.yaml.txt rename to docs/licenses/ruamel-yaml.txt diff --git a/docs/licenses/tacacs_plus.txt b/docs/licenses/tacacs-plus.txt similarity index 100% rename from docs/licenses/tacacs_plus.txt rename to docs/licenses/tacacs-plus.txt diff --git a/docs/licenses/zope.interface.txt b/docs/licenses/zope-interface.txt similarity index 100% rename from docs/licenses/zope.interface.txt rename to docs/licenses/zope-interface.txt diff --git a/requirements/requirements.in b/requirements/requirements.in index d60639aa1b..d7ace60d6b 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -9,13 +9,12 @@ cryptography>=3.2 Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep daphne distro -django==3.0.14 # see UPGRADE BLOCKERs +django==3.1.14 # see UPGRADE BLOCKERs django-auth-ldap django-cors-headers>=3.5.0 django-crum django-extensions>=2.2.9 # https://github.com/ansible/awx/pull/6441 django-guid==2.2.1 # see https://pypi.org/project/django-guid/ for supported versions -django-jsonfield==1.4.1 django-oauth-toolkit==1.4.1 django-polymorphic django-pglocks @@ -25,7 +24,7 @@ django-redis django-solo django-split-settings django-taggit -djangorestframework>=3.12.1 +djangorestframework==3.13.1 djangorestframework-yaml GitPython>=3.1.1 # minimum to fix https://github.com/ansible/awx/issues/6119 irc @@ -47,8 +46,8 @@ python-ldap>=3.3.1 # https://github.com/python-ldap/python-ldap/issues/270 pyyaml>=5.4.1 # minimum to fix https://github.com/yaml/pyyaml/issues/478 receptorctl==1.1.1 schedule==0.6.0 -social-auth-core==3.3.1 # see UPGRADE BLOCKERs -social-auth-app-django==3.1.0 # see UPGRADE BLOCKERs +social-auth-core==4.2.0 # see UPGRADE BLOCKERs +social-auth-app-django==5.0.0 # see UPGRADE BLOCKERs redis requests slack-sdk diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 2e16a46797..8ce2162d25 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -9,7 +9,7 @@ 
ansiconv==1.0.0 # via -r /awx_devel/requirements/requirements.in asciichartpy==1.5.25 # via -r /awx_devel/requirements/requirements.in -asgiref==3.2.5 +asgiref==3.5.0 # via # channels # channels-redis @@ -86,7 +86,7 @@ dictdiffer==0.8.1 # via openshift distro==1.5.0 # via -r /awx_devel/requirements/requirements.in -django==3.0.14 +django==3.1.14 # via # -r /awx_devel/requirements/requirements.in # channels @@ -94,28 +94,25 @@ django==3.0.14 # django-cors-headers # django-crum # django-guid - # django-jsonfield # django-oauth-toolkit # django-polymorphic # django-taggit # djangorestframework -django-auth-ldap==2.1.0 +django-auth-ldap==4.0.0 # via -r /awx_devel/requirements/requirements.in django-cors-headers==3.7.0 # via -r /awx_devel/requirements/requirements.in -django-crum==0.7.5 +django-crum==0.7.9 # via -r /awx_devel/requirements/requirements.in django-extensions==2.2.9 # via -r /awx_devel/requirements/requirements.in django-guid==2.2.1 # via -r /awx_devel/requirements/requirements.in -django-jsonfield==1.4.1 - # via -r /awx_devel/requirements/requirements.in django-oauth-toolkit==1.4.1 # via -r /awx_devel/requirements/requirements.in django-pglocks==1.0.4 # via -r /awx_devel/requirements/requirements.in -django-polymorphic==2.1.2 +django-polymorphic==3.1.0 # via -r /awx_devel/requirements/requirements.in django-qsstats-magic==1.1.0 # via -r /awx_devel/requirements/requirements.in @@ -129,9 +126,9 @@ django-split-settings==1.0.0 # via -r /awx_devel/requirements/requirements.in django-taggit==1.2.0 # via -r /awx_devel/requirements/requirements.in -djangorestframework==3.12.1 +djangorestframework==3.13.1 # via -r /awx_devel/requirements/requirements.in -djangorestframework-yaml==1.0.3 +djangorestframework-yaml==2.0.0 # via -r /awx_devel/requirements/requirements.in docutils==0.16 # via python-daemon @@ -163,23 +160,23 @@ isodate==0.6.1 # via # msrest # python3-saml -jaraco.classes==3.1.0 - # via jaraco.collections -jaraco.collections==3.0.0 +jaraco-classes==3.1.0 
+ # via jaraco-collections +jaraco-collections==3.0.0 # via irc -jaraco.functools==3.0.0 +jaraco-functools==3.0.0 # via # irc - # jaraco.text + # jaraco-text # tempora -jaraco.logging==3.0.0 +jaraco-logging==3.0.0 # via irc -jaraco.stream==3.0.0 +jaraco-stream==3.0.0 # via irc -jaraco.text==3.2.0 +jaraco-text==3.2.0 # via # irc - # jaraco.collections + # jaraco-collections jinja2==3.0.3 # via # -r /awx_devel/requirements/requirements.in @@ -203,8 +200,8 @@ markupsafe==2.0.1 more-itertools==8.2.0 # via # irc - # jaraco.classes - # jaraco.functools + # jaraco-classes + # jaraco-functools msgpack==1.0.0 # via channels-redis msrest==0.6.11 @@ -259,7 +256,7 @@ pygerduty==0.38.2 # via -r /awx_devel/requirements/requirements.in pyhamcrest==2.0.2 # via twisted -pyjwt==1.7.1 +pyjwt==2.3.0 # via # adal # social-auth-core @@ -298,6 +295,7 @@ python3-saml==1.13.0 pytz==2019.3 # via # django + # djangorestframework # irc # tempora # twilio @@ -334,7 +332,7 @@ requests-oauthlib==1.3.0 # social-auth-core rsa==4.7.2 # via google-auth -ruamel.yaml==0.16.10 +ruamel-yaml==0.16.10 # via openshift schedule==0.6.0 # via -r /awx_devel/requirements/requirements.in @@ -346,13 +344,12 @@ six==1.14.0 # automat # cryptography # django-extensions - # django-jsonfield # django-pglocks # google-auth # isodate - # jaraco.collections - # jaraco.logging - # jaraco.text + # jaraco-collections + # jaraco-logging + # jaraco-text # jsonschema # kubernetes # openshift @@ -361,8 +358,6 @@ six==1.14.0 # pyrad # pyrsistent # python-dateutil - # social-auth-app-django - # social-auth-core # tacacs-plus # twilio # websocket-client @@ -370,20 +365,20 @@ slack-sdk==3.11.2 # via -r /awx_devel/requirements/requirements.in smmap==3.0.1 # via gitdb -social-auth-app-django==3.1.0 +social-auth-app-django==5.0.0 # via -r /awx_devel/requirements/requirements.in -social-auth-core==3.3.1 +social-auth-core==4.2.0 # via # -r /awx_devel/requirements/requirements.in # social-auth-app-django sqlparse==0.3.1 # via django 
-tacacs_plus==1.0 +tacacs-plus==1.0 # via -r /awx_devel/requirements/requirements.in tempora==2.1.0 # via # irc - # jaraco.logging + # jaraco-logging twilio==6.37.0 # via -r /awx_devel/requirements/requirements.in twisted[tls]==20.3.0 @@ -410,7 +405,7 @@ xmlsec==1.3.12 # via python3-saml yarl==1.4.2 # via aiohttp -zope.interface==5.0.0 +zope-interface==5.0.0 # via twisted # The following packages are considered to be unsafe in a requirements file: @@ -425,4 +420,4 @@ setuptools==58.2.0 # kubernetes # markdown # python-daemon - # zope.interface + # zope-interface From 05142a779d4e4c89afefaacfaa55c2a8ad82ecb4 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 3 Feb 2022 10:29:08 -0500 Subject: [PATCH 083/125] Replace all usage of customized json fields with the Django builtin The event_data field on event models, however, is getting an overridden version that retains the underlying text data type for the column, to avoid a heavy data migration on those tables. Also, certain of the larger tables are getting these fields with the NOT NULL constraint turned off, to avoid a long migration. Remove the django.utils.six monkey patch we did at the beginning of the upgrade. 
--- awx/__init__.py | 5 -- awx/api/metadata.py | 3 +- awx/api/serializers.py | 4 +- awx/conf/migrations/0001_initial.py | 3 +- .../migrations/0003_v310_JSONField_changes.py | 5 +- awx/conf/models.py | 3 +- awx/main/fields.py | 53 +++++-------------- awx/main/migrations/0001_initial.py | 11 ++-- .../migrations/0002_squashed_v300_release.py | 9 ++-- .../migrations/0004_squashed_v310_release.py | 49 ++++++++--------- awx/main/migrations/0006_v320_release.py | 10 ++-- ...2_add_setting_field_for_activity_stream.py | 5 +- .../0014_v330_saved_launchtime_configs.py | 18 +++---- .../0018_v330_add_additional_stdout_events.py | 9 ++-- .../0020_v330_instancegroup_policies.py | 5 +- ...8_v330_add_deleted_activitystream_actor.py | 6 +-- .../0053_v340_workflow_inventory.py | 2 +- ..._v360_add_notificationtemplate_messages.py | 3 +- awx/main/migrations/0090_v360_WFJT_prompts.py | 2 +- .../0129_unifiedjob_installed_collections.py | 5 +- awx/main/migrations/_squashed_30.py | 3 +- awx/main/models/activity_stream.py | 5 +- awx/main/models/events.py | 12 ++--- awx/main/models/ha.py | 5 +- awx/main/models/inventory.py | 3 +- awx/main/models/jobs.py | 16 +++--- awx/main/models/mixins.py | 14 ++--- awx/main/models/notifications.py | 7 ++- awx/main/models/projects.py | 9 ++-- awx/main/models/unified_jobs.py | 9 ++-- awx/main/models/workflow.py | 6 +-- awx/main/tests/factories/fixtures.py | 8 +-- awx/main/tests/functional/conftest.py | 6 --- awx/main/tests/unit/utils/test_filters.py | 34 ------------ awx/main/utils/filters.py | 4 +- requirements/README.md | 11 ---- 36 files changed, 122 insertions(+), 240 deletions(-) diff --git a/awx/__init__.py b/awx/__init__.py index 31806538c2..eae7df87bd 100644 --- a/awx/__init__.py +++ b/awx/__init__.py @@ -6,8 +6,6 @@ import os import sys import warnings -import six - from pkg_resources import get_distribution __version__ = get_distribution('awx').version @@ -37,9 +35,6 @@ else: from django.db.models import indexes from django.db.backends.utils 
import names_digest from django.db import connection - from django import utils - - utils.six = six # FIXME: monkey patch to get us through for now if HAS_DJANGO is True: diff --git a/awx/api/metadata.py b/awx/api/metadata.py index efc3f8b09e..b4c75d09cb 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -6,6 +6,7 @@ from uuid import UUID # Django from django.core.exceptions import PermissionDenied +from django.db.models import JSONField from django.db.models.fields import PositiveIntegerField, BooleanField from django.db.models.fields.related import ForeignKey from django.http import Http404 @@ -22,7 +23,7 @@ from rest_framework.request import clone_request # AWX from awx.api.fields import ChoiceNullField -from awx.main.fields import JSONField, ImplicitRoleField +from awx.main.fields import ImplicitRoleField from awx.main.models import NotificationTemplate from awx.main.utils.execution_environments import get_default_pod_spec diff --git a/awx/api/serializers.py b/awx/api/serializers.py index a63e651840..f58d537104 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -97,7 +97,7 @@ from awx.main.models import ( ) from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES from awx.main.models.rbac import get_roles_on_resource, role_summary_fields_generator -from awx.main.fields import ImplicitRoleField, JSONBField +from awx.main.fields import ImplicitRoleField from awx.main.utils import ( get_type_for_model, get_model_for_type, @@ -1718,7 +1718,7 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables): def validate_host_filter(self, host_filter): if host_filter: try: - for match in JSONBField.get_lookups().keys(): + for match in models.JSONField.get_lookups().keys(): if match == 'exact': # __exact is allowed continue diff --git a/awx/conf/migrations/0001_initial.py b/awx/conf/migrations/0001_initial.py index 8bb9b6bcec..b239f5e143 100644 --- a/awx/conf/migrations/0001_initial.py +++ 
b/awx/conf/migrations/0001_initial.py @@ -2,7 +2,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import jsonfield.fields from django.conf import settings @@ -18,7 +17,7 @@ class Migration(migrations.Migration): ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), ('key', models.CharField(max_length=255)), - ('value', jsonfield.fields.JSONField(null=True)), + ('value', models.JSONField(null=True)), ( 'user', models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True), diff --git a/awx/conf/migrations/0003_v310_JSONField_changes.py b/awx/conf/migrations/0003_v310_JSONField_changes.py index 2550d2fff0..d312c40b1d 100644 --- a/awx/conf/migrations/0003_v310_JSONField_changes.py +++ b/awx/conf/migrations/0003_v310_JSONField_changes.py @@ -1,12 +1,11 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations -import awx.main.fields +from django.db import migrations, models class Migration(migrations.Migration): dependencies = [('conf', '0002_v310_copy_tower_settings')] - operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONField(null=True))] + operations = [migrations.AlterField(model_name='setting', name='value', field=models.JSONField(null=True))] diff --git a/awx/conf/models.py b/awx/conf/models.py index f64d8a2aab..05162436d1 100644 --- a/awx/conf/models.py +++ b/awx/conf/models.py @@ -9,7 +9,6 @@ from django.db import models # AWX from awx.main.models.base import CreatedModifiedModel, prevent_search -from awx.main.fields import JSONField from awx.main.utils import encrypt_field from awx.conf import settings_registry @@ -19,7 +18,7 @@ __all__ = ['Setting'] class Setting(CreatedModifiedModel): key = models.CharField(max_length=255) - value = JSONField(null=True) + value = 
models.JSONField(null=True) user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE)) def __str__(self): diff --git a/awx/main/fields.py b/awx/main/fields.py index 8f71b53c2f..83ab57f37d 100644 --- a/awx/main/fields.py +++ b/awx/main/fields.py @@ -11,7 +11,6 @@ from jinja2 import sandbox, StrictUndefined from jinja2.exceptions import UndefinedError, TemplateSyntaxError, SecurityError # Django -from django.contrib.postgres.fields import JSONField as upstream_JSONBField from django.core import exceptions as django_exceptions from django.core.serializers.json import DjangoJSONEncoder from django.db.models.signals import ( @@ -29,6 +28,7 @@ from django.db.models.fields.related_descriptors import ( create_forward_many_to_many_manager, ) from django.utils.encoding import smart_str +from django.db.models import JSONField from django.utils.functional import cached_property from django.utils.translation import gettext_lazy as _ @@ -36,9 +36,6 @@ from django.utils.translation import gettext_lazy as _ from jsonschema import Draft4Validator, FormatChecker import jsonschema.exceptions -# Django-JSONField -from jsonfield import JSONField as upstream_JSONField - # DRF from rest_framework import serializers @@ -52,9 +49,9 @@ from awx.main import utils __all__ = [ + 'JSONBlob', 'AutoOneToOneField', 'ImplicitRoleField', - 'JSONField', 'SmartFilterField', 'OrderedManyToManyField', 'update_role_parentage_for_instance', @@ -71,40 +68,9 @@ def __enum_validate__(validator, enums, instance, schema): Draft4Validator.VALIDATORS['enum'] = __enum_validate__ -class JSONField(upstream_JSONField): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.decoder_kwargs = {'cls': json.JSONDecoder} # FIXME - - def db_type(self, connection): - return 'text' - - def from_db_value(self, value, expression, connection): - if value in {'', None} and not self.null: - return {} - return 
super(JSONField, self).from_db_value(value, expression, connection) - - -class JSONBField(upstream_JSONBField): - def get_prep_lookup(self, lookup_type, value): - if isinstance(value, str) and value == "null": - return 'null' - return super(JSONBField, self).get_prep_lookup(lookup_type, value) - - def get_db_prep_value(self, value, connection, prepared=False): - if connection.vendor == 'sqlite': - # sqlite (which we use for tests) does not support jsonb; - if hasattr(value, 'adapted'): - value = value.adapted # FIXME: Django 3.0 uses JsonAdapter, removed in 3.1 - return json.dumps(value, cls=DjangoJSONEncoder) - return super(JSONBField, self).get_db_prep_value(value, connection, prepared) - - def from_db_value(self, value, expression, connection): - # Work around a bug in django-jsonfield - # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos - if isinstance(value, str): - return json.loads(value) - return value +class JSONBlob(JSONField): + def get_internal_type(self): + return "TextField" # Based on AutoOneToOneField from django-annoying: @@ -391,7 +357,7 @@ class SmartFilterField(models.TextField): return super(SmartFilterField, self).get_prep_value(value) -class JSONSchemaField(JSONBField): +class JSONSchemaField(models.JSONField): """ A JSONB field that self-validates against a defined JSON schema (http://json-schema.org). 
This base class is intended to be overwritten by @@ -404,8 +370,13 @@ class JSONSchemaField(JSONBField): # validation empty_values = (None, '') + def __init__(self, encoder=None, decoder=None, **options): + if encoder is None: + encoder = DjangoJSONEncoder + super().__init__(encoder=encoder, decoder=decoder, **options) + def get_default(self): - return copy.deepcopy(super(JSONBField, self).get_default()) + return copy.deepcopy(super(models.JSONField, self).get_default()) def schema(self, model_instance): raise NotImplementedError() diff --git a/awx/main/migrations/0001_initial.py b/awx/main/migrations/0001_initial.py index 7ce9911546..c3dcbe36b7 100644 --- a/awx/main/migrations/0001_initial.py +++ b/awx/main/migrations/0001_initial.py @@ -7,7 +7,6 @@ from __future__ import unicode_literals from django.db import migrations, models import django.utils.timezone -import jsonfield.fields import django.db.models.deletion from django.conf import settings import taggit.managers @@ -70,7 +69,7 @@ class Migration(migrations.Migration): ], ), ), - ('event_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('event_data', awx.main.fields.JSONBlob(default=dict, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('counter', models.PositiveIntegerField(default=0)), @@ -433,7 +432,7 @@ class Migration(migrations.Migration): ], ), ), - ('event_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('event_data', awx.main.fields.JSONBlob(default=dict, blank=True)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('host_name', models.CharField(default='', max_length=1024, editable=False)), @@ -623,7 +622,7 @@ class Migration(migrations.Migration): ('dtend', models.DateTimeField(default=None, null=True, editable=False)), ('rrule', models.CharField(max_length=255)), ('next_run', 
models.DateTimeField(default=None, null=True, editable=False)), - ('extra_data', jsonfield.fields.JSONField(default=dict, blank=True)), + ('extra_data', models.JSONField(default=dict, null=True, blank=True)), ( 'created_by', models.ForeignKey( @@ -751,7 +750,7 @@ class Migration(migrations.Migration): ('elapsed', models.DecimalField(editable=False, max_digits=12, decimal_places=3)), ('job_args', models.TextField(default='', editable=False, blank=True)), ('job_cwd', models.CharField(default='', max_length=1024, editable=False, blank=True)), - ('job_env', jsonfield.fields.JSONField(default=dict, editable=False, blank=True)), + ('job_env', models.JSONField(default=dict, editable=False, null=True, blank=True)), ('job_explanation', models.TextField(default='', editable=False, blank=True)), ('start_args', models.TextField(default='', editable=False, blank=True)), ('result_stdout_text', models.TextField(default='', editable=False, blank=True)), @@ -1035,7 +1034,7 @@ class Migration(migrations.Migration): ('host_config_key', models.CharField(default='', max_length=1024, blank=True)), ('ask_variables_on_launch', models.BooleanField(default=False)), ('survey_enabled', models.BooleanField(default=False)), - ('survey_spec', jsonfield.fields.JSONField(default=dict, blank=True)), + ('survey_spec', models.JSONField(default=dict, blank=True)), ], options={ 'ordering': ('name',), diff --git a/awx/main/migrations/0002_squashed_v300_release.py b/awx/main/migrations/0002_squashed_v300_release.py index 2afdef1845..5f23ed566f 100644 --- a/awx/main/migrations/0002_squashed_v300_release.py +++ b/awx/main/migrations/0002_squashed_v300_release.py @@ -12,7 +12,6 @@ import django.db.models.deletion from django.conf import settings from django.utils.timezone import now -import jsonfield.fields import taggit.managers @@ -199,7 +198,7 @@ class Migration(migrations.Migration): ), ('recipients', models.TextField(default='', editable=False, blank=True)), ('subject', models.TextField(default='', 
editable=False, blank=True)), - ('body', jsonfield.fields.JSONField(default=dict, blank=True)), + ('body', models.JSONField(default=dict, null=True, blank=True)), ], options={ 'ordering': ('pk',), @@ -230,7 +229,7 @@ class Migration(migrations.Migration): ], ), ), - ('notification_configuration', jsonfield.fields.JSONField(default=dict)), + ('notification_configuration', models.JSONField(default=dict)), ( 'created_by', models.ForeignKey( @@ -324,9 +323,7 @@ class Migration(migrations.Migration): ('module', models.CharField(max_length=128)), ( 'facts', - awx.main.fields.JSONBField( - default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True - ), + models.JSONField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True), ), ( 'host', diff --git a/awx/main/migrations/0004_squashed_v310_release.py b/awx/main/migrations/0004_squashed_v310_release.py index 06fd3aeed3..c0ac0d4a04 100644 --- a/awx/main/migrations/0004_squashed_v310_release.py +++ b/awx/main/migrations/0004_squashed_v310_release.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.db import migrations, models import awx.main.models.notifications -import jsonfield.fields import django.db.models.deletion import awx.main.models.workflow import awx.main.fields @@ -221,7 +220,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobnode', name='char_prompts', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', @@ -260,7 +259,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplatenode', name='char_prompts', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( 
model_name='workflowjobtemplatenode', @@ -308,12 +307,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='job', name='artifacts', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', name='ancestor_artifacts', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), # Job timeout settings migrations.AddField( @@ -381,9 +380,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='playbook_files', - field=jsonfield.fields.JSONField( - default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True - ), + field=models.JSONField(default=list, help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True), ), # Job events to stdout migrations.AddField( @@ -539,7 +536,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjob', name='survey_passwords', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplate', @@ -549,85 +546,83 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplate', name='survey_spec', - field=jsonfield.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), # JSON field changes migrations.AlterField( model_name='adhoccommandevent', name='event_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=awx.main.fields.JSONBlob(default=dict, blank=True), ), migrations.AlterField( model_name='job', name='artifacts', - 
field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='job', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='jobevent', name='event_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=awx.main.fields.JSONBlob(default=dict, blank=True), ), migrations.AlterField( model_name='jobtemplate', name='survey_spec', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), migrations.AlterField( model_name='notification', name='body', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='notificationtemplate', name='notification_configuration', - field=awx.main.fields.JSONField(default=dict), + field=models.JSONField(default=dict), ), migrations.AlterField( model_name='project', name='playbook_files', - field=awx.main.fields.JSONField( - default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True - ), + field=models.JSONField(default=list, help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True), ), migrations.AlterField( model_name='schedule', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='unifiedjob', name='job_env', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjob', 
name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobnode', name='ancestor_artifacts', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobnode', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AlterField( model_name='workflowjobtemplate', name='survey_spec', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, blank=True), ), migrations.AlterField( model_name='workflowjobtemplatenode', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), # Job Project Update migrations.AddField( diff --git a/awx/main/migrations/0006_v320_release.py b/awx/main/migrations/0006_v320_release.py index 1f755f94ce..c05bee3eec 100644 --- a/awx/main/migrations/0006_v320_release.py +++ b/awx/main/migrations/0006_v320_release.py @@ -108,14 +108,12 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='fact', name='facts', - field=awx.main.fields.JSONBField( - default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True - ), + field=models.JSONField(default=dict, help_text='Arbitrary JSON structure of module facts captured at timestamp for a single host.', blank=True), ), migrations.AddField( model_name='host', name='ansible_facts', - field=awx.main.fields.JSONBField(default=dict, help_text='Arbitrary JSON structure of most recent ansible_facts, per-host.', blank=True), + field=models.JSONField(default=dict, help_text='Arbitrary JSON 
structure of most recent ansible_facts, per-host.', blank=True), ), migrations.AddField( model_name='host', @@ -177,8 +175,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='project', name='inventory_files', - field=awx.main.fields.JSONField( - default=[], + field=models.JSONField( + default=list, help_text='Suggested list of content that could be Ansible inventory in the project', verbose_name='Inventory Files', editable=False, diff --git a/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py b/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py index 3d69de2b33..56c86b19a8 100644 --- a/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py +++ b/awx/main/migrations/0009_v322_add_setting_field_for_activity_stream.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -from django.db import migrations -import awx.main.fields +from django.db import migrations, models class Migration(migrations.Migration): @@ -15,6 +14,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='activitystream', name='setting', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), ] diff --git a/awx/main/migrations/0014_v330_saved_launchtime_configs.py b/awx/main/migrations/0014_v330_saved_launchtime_configs.py index d120166218..38c5d2b2f6 100644 --- a/awx/main/migrations/0014_v330_saved_launchtime_configs.py +++ b/awx/main/migrations/0014_v330_saved_launchtime_configs.py @@ -20,7 +20,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='schedule', name='char_prompts', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='schedule', @@ -37,7 +37,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='schedule', name='survey_passwords', - 
field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', @@ -47,12 +47,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobnode', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobnode', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', @@ -62,12 +62,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjobtemplatenode', name='extra_data', - field=awx.main.fields.JSONField(default=dict, blank=True), + field=models.JSONField(default=dict, null=True, blank=True), ), migrations.AddField( model_name='workflowjobtemplatenode', name='survey_passwords', - field=awx.main.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), # Run data migration before removing the old credential field migrations.RunPython(migration_utils.set_current_apps_for_migrations, migrations.RunPython.noop), @@ -85,9 +85,9 @@ class Migration(migrations.Migration): name='JobLaunchConfig', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('extra_data', awx.main.fields.JSONField(blank=True, default=dict)), - ('survey_passwords', awx.main.fields.JSONField(blank=True, default=dict, editable=False)), - ('char_prompts', awx.main.fields.JSONField(blank=True, default=dict)), + ('extra_data', models.JSONField(blank=True, null=True, default=dict)), + ('survey_passwords', models.JSONField(blank=True, null=True, 
default=dict, editable=False)), + ('char_prompts', models.JSONField(blank=True, null=True, default=dict)), ('credentials', models.ManyToManyField(related_name='joblaunchconfigs', to='main.Credential')), ( 'inventory', diff --git a/awx/main/migrations/0018_v330_add_additional_stdout_events.py b/awx/main/migrations/0018_v330_add_additional_stdout_events.py index c9b026eeb5..ad399e72bb 100644 --- a/awx/main/migrations/0018_v330_add_additional_stdout_events.py +++ b/awx/main/migrations/0018_v330_add_additional_stdout_events.py @@ -2,10 +2,11 @@ # Generated by Django 1.11.7 on 2017-12-14 15:13 from __future__ import unicode_literals -import awx.main.fields from django.db import migrations, models import django.db.models.deletion +import awx.main.fields + class Migration(migrations.Migration): @@ -20,7 +21,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), ('stdout', models.TextField(default='', editable=False)), @@ -84,7 +85,7 @@ class Migration(migrations.Migration): max_length=100, ), ), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('failed', models.BooleanField(default=False, editable=False)), ('changed', models.BooleanField(default=False, editable=False)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), @@ -114,7 +115,7 @@ class Migration(migrations.Migration): ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), ('created', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)), - ('event_data', awx.main.fields.JSONField(blank=True, default=dict)), + ('event_data', awx.main.fields.JSONBlob(blank=True, default=dict)), ('uuid', models.CharField(default='', editable=False, max_length=1024)), ('counter', models.PositiveIntegerField(default=0, editable=False)), ('stdout', models.TextField(default='', editable=False)), diff --git a/awx/main/migrations/0020_v330_instancegroup_policies.py b/awx/main/migrations/0020_v330_instancegroup_policies.py index e2dc677b44..0577f14ee9 100644 --- a/awx/main/migrations/0020_v330_instancegroup_policies.py +++ b/awx/main/migrations/0020_v330_instancegroup_policies.py @@ -3,7 +3,6 @@ from __future__ import unicode_literals from django.db import migrations, models from decimal import Decimal -import awx.main.fields class Migration(migrations.Migration): @@ -16,8 +15,8 @@ class Migration(migrations.Migration): migrations.AddField( model_name='instancegroup', name='policy_instance_list', - field=awx.main.fields.JSONField( - default=[], help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True + field=models.JSONField( + default=list, help_text='List of exact-match Instances that will always be automatically assigned to this group', blank=True ), ), migrations.AddField( diff --git a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py index 2f856e23f5..504fa14eb3 100644 --- a/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py +++ b/awx/main/migrations/0038_v330_add_deleted_activitystream_actor.py @@ -2,9 +2,7 @@ # Generated by Django 1.11.11 on 2018-05-21 19:51 from __future__ import unicode_literals -import awx.main.fields -import awx.main.models.activity_stream -from django.db import migrations +from django.db import 
models, migrations class Migration(migrations.Migration): @@ -17,6 +15,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='activitystream', name='deleted_actor', - field=awx.main.fields.JSONField(null=True), + field=models.JSONField(null=True), ), ] diff --git a/awx/main/migrations/0053_v340_workflow_inventory.py b/awx/main/migrations/0053_v340_workflow_inventory.py index 23bede35f7..e3dd56a3b2 100644 --- a/awx/main/migrations/0053_v340_workflow_inventory.py +++ b/awx/main/migrations/0053_v340_workflow_inventory.py @@ -17,7 +17,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='workflowjob', name='char_prompts', - field=awx.main.fields.JSONField(blank=True, default=dict), + field=models.JSONField(blank=True, null=True, default=dict), ), migrations.AddField( model_name='workflowjob', diff --git a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py index 690989276b..c2c69bb440 100644 --- a/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py +++ b/awx/main/migrations/0085_v360_add_notificationtemplate_messages.py @@ -4,7 +4,6 @@ from __future__ import unicode_literals from django.db import migrations, models -import awx.main.fields import awx.main.models.notifications @@ -18,7 +17,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='notificationtemplate', name='messages', - field=awx.main.fields.JSONField( + field=models.JSONField( default=awx.main.models.notifications.NotificationTemplate.default_messages, help_text='Optional custom messages for notification template.', null=True, diff --git a/awx/main/migrations/0090_v360_WFJT_prompts.py b/awx/main/migrations/0090_v360_WFJT_prompts.py index 46fb497202..fdc3b85fcc 100644 --- a/awx/main/migrations/0090_v360_WFJT_prompts.py +++ b/awx/main/migrations/0090_v360_WFJT_prompts.py @@ -24,7 +24,7 @@ class Migration(migrations.Migration): 
migrations.AddField( model_name='workflowjobtemplate', name='char_prompts', - field=awx.main.fields.JSONField(blank=True, default=dict), + field=models.JSONField(blank=True, null=True, default=dict), ), migrations.AlterField( model_name='joblaunchconfig', diff --git a/awx/main/migrations/0129_unifiedjob_installed_collections.py b/awx/main/migrations/0129_unifiedjob_installed_collections.py index d20c9068d0..644bff4132 100644 --- a/awx/main/migrations/0129_unifiedjob_installed_collections.py +++ b/awx/main/migrations/0129_unifiedjob_installed_collections.py @@ -1,7 +1,6 @@ # Generated by Django 2.2.16 on 2021-02-16 20:27 -import awx.main.fields -from django.db import migrations +from django.db import migrations, models class Migration(migrations.Migration): @@ -14,7 +13,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='unifiedjob', name='installed_collections', - field=awx.main.fields.JSONBField( + field=models.JSONField( blank=True, default=dict, editable=False, help_text='The Collections names and versions installed in the execution environment.' 
), ), diff --git a/awx/main/migrations/_squashed_30.py b/awx/main/migrations/_squashed_30.py index c604b95c37..90c2dd061b 100644 --- a/awx/main/migrations/_squashed_30.py +++ b/awx/main/migrations/_squashed_30.py @@ -2,7 +2,6 @@ from django.db import ( migrations, models, ) -import jsonfield.fields import awx.main.fields from awx.main.migrations import _save_password_keys @@ -30,7 +29,7 @@ SQUASHED_30 = { migrations.AddField( model_name='job', name='survey_passwords', - field=jsonfield.fields.JSONField(default=dict, editable=False, blank=True), + field=models.JSONField(default=dict, editable=False, null=True, blank=True), ), ], '0031_v302_migrate_survey_passwords': [ diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index a2f68af27b..aa0ab9d9d6 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -3,7 +3,6 @@ # AWX from awx.api.versioning import reverse -from awx.main.fields import JSONField from awx.main.models.base import accepts_json # Django @@ -36,7 +35,7 @@ class ActivityStream(models.Model): operation = models.CharField(max_length=13, choices=OPERATION_CHOICES) timestamp = models.DateTimeField(auto_now_add=True) changes = accepts_json(models.TextField(blank=True)) - deleted_actor = JSONField(null=True) + deleted_actor = models.JSONField(null=True) action_node = models.CharField( blank=True, default='', @@ -84,7 +83,7 @@ class ActivityStream(models.Model): o_auth2_application = models.ManyToManyField("OAuth2Application", blank=True) o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken", blank=True) - setting = JSONField(blank=True) + setting = models.JSONField(default=dict, null=True, blank=True) def __str__(self): operation = self.operation if 'operation' in self.__dict__ else '_delayed_' diff --git a/awx/main/models/events.py b/awx/main/models/events.py index 0d4b60247b..f80c23d58b 100644 --- a/awx/main/models/events.py +++ b/awx/main/models/events.py @@ -15,8 +15,8 @@ 
from django.utils.encoding import force_str from awx.api.versioning import reverse from awx.main import consumers +from awx.main.fields import JSONBlob from awx.main.managers import DeferJobCreatedManager -from awx.main.fields import JSONField from awx.main.constants import MINIMAL_EVENTS from awx.main.models.base import CreatedModifiedModel from awx.main.utils import ignore_inventory_computed_fields, camelcase_to_underscore @@ -209,10 +209,7 @@ class BasePlaybookEvent(CreatedModifiedModel): max_length=100, choices=EVENT_CHOICES, ) - event_data = JSONField( - blank=True, - default=dict, - ) + event_data = JSONBlob(default=dict, blank=True) failed = models.BooleanField( default=False, editable=False, @@ -648,10 +645,7 @@ class BaseCommandEvent(CreatedModifiedModel): class Meta: abstract = True - event_data = JSONField( - blank=True, - default=dict, - ) + event_data = JSONBlob(default=dict, blank=True) uuid = models.CharField( max_length=1024, default='', diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index 6182b2ce7e..b9d85559aa 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -19,7 +19,6 @@ from solo.models import SingletonModel from awx import __version__ as awx_application_version from awx.api.versioning import reverse from awx.main.managers import InstanceManager, InstanceGroupManager, UUID_DEFAULT -from awx.main.fields import JSONField from awx.main.constants import JOB_FOLDER_PREFIX from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search from awx.main.models.unified_jobs import UnifiedJob @@ -322,8 +321,8 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin): ) policy_instance_percentage = models.IntegerField(default=0, help_text=_("Percentage of Instances to automatically assign to this group")) policy_instance_minimum = models.IntegerField(default=0, help_text=_("Static minimum number of Instances to automatically assign to this group")) - policy_instance_list = JSONField( - default=[], 
blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group") + policy_instance_list = models.JSONField( + default=list, blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group") ) POLICY_FIELDS = frozenset(('policy_instance_list', 'policy_instance_minimum', 'policy_instance_percentage')) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 7e278dd208..3b7945c965 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -29,7 +29,6 @@ from awx.main.constants import CLOUD_PROVIDERS from awx.main.consumers import emit_channel_notification from awx.main.fields import ( ImplicitRoleField, - JSONBField, SmartFilterField, OrderedManyToManyField, ) @@ -488,7 +487,7 @@ class Host(CommonModelNameNotUnique, RelatedJobsMixin): editable=False, help_text=_('Inventory source(s) that created or modified this host.'), ) - ansible_facts = JSONBField( + ansible_facts = models.JSONField( blank=True, default=dict, help_text=_('Arbitrary JSON structure of most recent ansible_facts, per-host.'), diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index c2bc72a7eb..3b22ecd02c 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -44,7 +44,7 @@ from awx.main.models.notifications import ( JobNotificationMixin, ) from awx.main.utils import parse_yaml_or_json, getattr_dne, NullablePromptPseudoField -from awx.main.fields import ImplicitRoleField, JSONField, AskForField +from awx.main.fields import ImplicitRoleField, AskForField from awx.main.models.mixins import ( ResourceMixin, SurveyJobTemplateMixin, @@ -546,9 +546,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana editable=False, through='JobHostSummary', ) - artifacts = JSONField( - blank=True, + artifacts = models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) scm_revision = models.CharField( @@ 
-885,7 +886,7 @@ class LaunchTimeConfigBase(BaseModel): ) # All standard fields are stored in this dictionary field # This is a solution to the nullable CharField problem, specific to prompting - char_prompts = JSONField(blank=True, default=dict) + char_prompts = models.JSONField(default=dict, null=True, blank=True) def prompts_dict(self, display=False): data = {} @@ -938,12 +939,13 @@ class LaunchTimeConfig(LaunchTimeConfigBase): abstract = True # Special case prompting fields, even more special than the other ones - extra_data = JSONField(blank=True, default=dict) + extra_data = models.JSONField(default=dict, null=True, blank=True) survey_passwords = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, editable=False, + null=True, + blank=True, ) ) # Credentials needed for non-unified job / unified JT models diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py index a5bb14b5a8..94e737859b 100644 --- a/awx/main/models/mixins.py +++ b/awx/main/models/mixins.py @@ -24,7 +24,7 @@ from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_lice from awx.main.utils.execution_environments import get_default_execution_environment from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted from awx.main.utils.polymorphic import build_polymorphic_ctypes_map -from awx.main.fields import JSONField, AskForField +from awx.main.fields import AskForField from awx.main.constants import ACTIVE_STATES @@ -103,12 +103,7 @@ class SurveyJobTemplateMixin(models.Model): survey_enabled = models.BooleanField( default=False, ) - survey_spec = prevent_search( - JSONField( - blank=True, - default=dict, - ) - ) + survey_spec = prevent_search(models.JSONField(default=dict, blank=True)) ask_variables_on_launch = AskForField(blank=True, default=False, allows_field='extra_vars') def survey_password_variables(self): @@ -370,10 +365,11 @@ class SurveyJobMixin(models.Model): abstract = True survey_passwords = 
prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, editable=False, + null=True, + blank=True, ) ) diff --git a/awx/main/models/notifications.py b/awx/main/models/notifications.py index d73591070c..9bfd1bc6b5 100644 --- a/awx/main/models/notifications.py +++ b/awx/main/models/notifications.py @@ -28,7 +28,6 @@ from awx.main.notifications.mattermost_backend import MattermostBackend from awx.main.notifications.grafana_backend import GrafanaBackend from awx.main.notifications.rocketchat_backend import RocketChatBackend from awx.main.notifications.irc_backend import IrcBackend -from awx.main.fields import JSONField logger = logging.getLogger('awx.main.models.notifications') @@ -70,12 +69,12 @@ class NotificationTemplate(CommonModelNameNotUnique): choices=NOTIFICATION_TYPE_CHOICES, ) - notification_configuration = prevent_search(JSONField(blank=False)) + notification_configuration = prevent_search(models.JSONField(default=dict)) def default_messages(): return {'started': None, 'success': None, 'error': None, 'workflow_approval': None} - messages = JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.')) + messages = models.JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.')) def has_message(self, condition): potential_template = self.messages.get(condition, {}) @@ -237,7 +236,7 @@ class Notification(CreatedModifiedModel): default='', editable=False, ) - body = JSONField(blank=True) + body = models.JSONField(default=dict, null=True, blank=True) def get_absolute_url(self, request=None): return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request) diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 701d05d235..385674d7ab 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -38,7 +38,6 @@ from awx.main.models.rbac import ( 
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR, ) -from awx.main.fields import JSONField __all__ = ['Project', 'ProjectUpdate'] @@ -294,17 +293,17 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn help_text=_('The last revision fetched by a project update'), ) - playbook_files = JSONField( + playbook_files = models.JSONField( + default=list, blank=True, - default=[], editable=False, verbose_name=_('Playbook Files'), help_text=_('List of playbooks found in the project'), ) - inventory_files = JSONField( + inventory_files = models.JSONField( + default=list, blank=True, - default=[], editable=False, verbose_name=_('Inventory Files'), help_text=_('Suggested list of content that could be Ansible inventory in the project'), diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index f08e37e06c..65804c97b0 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -54,7 +54,7 @@ from awx.main.utils import polymorphic from awx.main.constants import ACTIVE_STATES, CAN_CANCEL from awx.main.redact import UriCleaner, REPLACE_STR from awx.main.consumers import emit_channel_notification -from awx.main.fields import JSONField, JSONBField, AskForField, OrderedManyToManyField +from awx.main.fields import AskForField, OrderedManyToManyField __all__ = ['UnifiedJobTemplate', 'UnifiedJob', 'StdoutMaxBytesExceeded'] @@ -653,9 +653,10 @@ class UnifiedJob( editable=False, ) job_env = prevent_search( - JSONField( - blank=True, + models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) ) @@ -704,7 +705,7 @@ class UnifiedJob( 'Credential', related_name='%(class)ss', ) - installed_collections = JSONBField( + installed_collections = models.JSONField( blank=True, default=dict, editable=False, diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index f9a91aafa7..197951ea05 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ 
-40,7 +40,6 @@ from awx.main.models.mixins import ( from awx.main.models.jobs import LaunchTimeConfigBase, LaunchTimeConfig, JobTemplate from awx.main.models.credential import Credential from awx.main.redact import REPLACE_STR -from awx.main.fields import JSONField from awx.main.utils import schedule_task_manager @@ -232,9 +231,10 @@ class WorkflowJobNode(WorkflowNodeBase): default=None, on_delete=models.CASCADE, ) - ancestor_artifacts = JSONField( - blank=True, + ancestor_artifacts = models.JSONField( default=dict, + null=True, + blank=True, editable=False, ) do_not_run = models.BooleanField( diff --git a/awx/main/tests/factories/fixtures.py b/awx/main/tests/factories/fixtures.py index 574916a84f..200fa0f195 100644 --- a/awx/main/tests/factories/fixtures.py +++ b/awx/main/tests/factories/fixtures.py @@ -180,8 +180,8 @@ def mk_job_template( jt.project = project - jt.survey_spec = spec - if jt.survey_spec is not None: + if spec is not None: + jt.survey_spec = spec jt.survey_enabled = True if persisted: @@ -212,8 +212,8 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None, wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization, webhook_service=webhook_service) - wfjt.survey_spec = spec - if wfjt.survey_spec: + if spec: + wfjt.survey_spec = spec wfjt.survey_enabled = True if persisted: diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 7e2178ca4d..ea18b491e6 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -15,7 +15,6 @@ from django.core.serializers.json import DjangoJSONEncoder from django.db.backends.sqlite3.base import SQLiteCursorWrapper # AWX -from awx.main.fields import JSONBField from awx.main.models.projects import Project from awx.main.models.ha import Instance @@ -755,11 +754,6 @@ def get_db_prep_save(self, value, connection, **kwargs): return value -@pytest.fixture -def 
monkeypatch_jsonbfield_get_db_prep_save(mocker): - JSONBField.get_db_prep_save = get_db_prep_save - - @pytest.fixture def oauth_application(admin): return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password') diff --git a/awx/main/tests/unit/utils/test_filters.py b/awx/main/tests/unit/utils/test_filters.py index 52e37ab893..ef0abb80d3 100644 --- a/awx/main/tests/unit/utils/test_filters.py +++ b/awx/main/tests/unit/utils/test_filters.py @@ -4,7 +4,6 @@ from unittest import mock # AWX from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled -from awx.main.models import Host # Django from django.db.models import Q @@ -219,39 +218,6 @@ class TestSmartFilterQueryFromString: assert str(q) == str(q_expected) -class TestSmartFilterQueryFromStringNoDB: - @pytest.mark.parametrize( - "filter_string,q_expected", - [ - ( - 'ansible_facts__a="true" and ansible_facts__b="true" and ansible_facts__c="true"', - ( - Q(**{u"ansible_facts__contains": {u"a": u"true"}}) - & Q(**{u"ansible_facts__contains": {u"b": u"true"}}) - & Q(**{u"ansible_facts__contains": {u"c": u"true"}}) - ), - ), - ( - 'ansible_facts__a="true" or ansible_facts__b="true" or ansible_facts__c="true"', - ( - Q(**{u"ansible_facts__contains": {u"a": u"true"}}) - | Q(**{u"ansible_facts__contains": {u"b": u"true"}}) - | Q(**{u"ansible_facts__contains": {u"c": u"true"}}) - ), - ), - ('search=foo', Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"}))), - ( - 'search=foo and ansible_facts__a="null"', - Q(Q(**{u"description__icontains": u"foo"}) | Q(**{u"name__icontains": u"foo"})) & Q(**{u"ansible_facts__contains": {u"a": u"\"null\""}}), - ), - ('name=foo or name=bar and name=foobar', Q(name="foo") | Q(name="bar") & Q(name="foobar")), - ], - ) - def test_does_not_invoke_db(self, filter_string, q_expected): - q = SmartFilter.query_from_string(filter_string) - assert str(q.query) == 
str(Host.objects.filter(q_expected).query) - - ''' #('"facts__quoted_val"="f\"oo"', 1), #('facts__facts__arr[]="foo"', 1), diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py index 002ab957bd..7320cbc02f 100644 --- a/awx/main/utils/filters.py +++ b/awx/main/utils/filters.py @@ -188,13 +188,11 @@ class SmartFilter(object): ''' def _json_path_to_contains(self, k, v): - from awx.main.fields import JSONBField # avoid a circular import - if not k.startswith(SmartFilter.SEARCHABLE_RELATIONSHIP): v = self.strip_quotes_traditional_logic(v) return (k, v) - for match in JSONBField.get_lookups().keys(): + for match in models.JSONField.get_lookups().keys(): match = '__{}'.format(match) if k.endswith(match): if match == '__exact': diff --git a/requirements/README.md b/requirements/README.md index 5dc2638c3c..a10d412f2c 100644 --- a/requirements/README.md +++ b/requirements/README.md @@ -105,17 +105,6 @@ Upgrading to 4.0.0 causes error because imports changed. ImportError: cannot import name 'KeyVaultClient' ``` -### django-jsonfield - -Instead of calling a `loads()` operation, the returned value is casted into -a string in some cases, introduced in the change: - -https://github.com/adamchainz/django-jsonfield/pull/14 - -This breaks a very large amount of AWX code that assumes these fields -are returned as dicts. Upgrading this library will require a refactor -to accommodate this change. - ### pip and setuptools The offline installer needs to have functionality confirmed before upgrading these. 
From e620bef2a5ad0c4059b4a170f14638f99034314c Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 3 Feb 2022 14:27:13 -0500 Subject: [PATCH 084/125] Fix Django 3.1 deprecation removal problems - FieldDoesNotExist now has to be imported from django.core.exceptions - Django docs specifically say not to import django.conf.global_settings, which now has the side-effect of triggering one of the check errors --- awx/api/filters.py | 3 +-- awx/api/generics.py | 6 +++--- awx/main/tests/unit/api/test_filters.py | 7 ++++--- awx/settings/defaults.py | 10 +--------- 4 files changed, 9 insertions(+), 17 deletions(-) diff --git a/awx/api/filters.py b/awx/api/filters.py index 5d2af58f72..18539e3b85 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -7,10 +7,9 @@ import json from functools import reduce # Django -from django.core.exceptions import FieldError, ValidationError +from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist from django.db import models from django.db.models import Q, CharField, IntegerField, BooleanField -from django.db.models.fields import FieldDoesNotExist from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey diff --git a/awx/api/generics.py b/awx/api/generics.py index c3c72ba30a..f2faec5c47 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -10,18 +10,18 @@ import urllib.parse # Django from django.conf import settings +from django.contrib.auth import views as auth_views +from django.contrib.contenttypes.models import ContentType from django.core.cache import cache +from django.core.exceptions import FieldDoesNotExist from django.db import connection -from django.db.models.fields import FieldDoesNotExist from django.db.models.fields.related import OneToOneRel from django.http import QueryDict from django.shortcuts import get_object_or_404 from 
django.template.loader import render_to_string from django.utils.encoding import smart_str from django.utils.safestring import mark_safe -from django.contrib.contenttypes.models import ContentType from django.utils.translation import gettext_lazy as _ -from django.contrib.auth import views as auth_views # Django REST Framework from rest_framework.exceptions import PermissionDenied, AuthenticationFailed, ParseError, NotAcceptable, UnsupportedMediaType diff --git a/awx/main/tests/unit/api/test_filters.py b/awx/main/tests/unit/api/test_filters.py index c523cd2650..21e651e22b 100644 --- a/awx/main/tests/unit/api/test_filters.py +++ b/awx/main/tests/unit/api/test_filters.py @@ -2,7 +2,11 @@ import pytest +# Django +from django.core.exceptions import FieldDoesNotExist + from rest_framework.exceptions import PermissionDenied, ParseError + from awx.api.filters import FieldLookupBackend, OrderByBackend, get_field_from_path from awx.main.models import ( AdHocCommand, @@ -22,9 +26,6 @@ from awx.main.models import ( from awx.main.models.oauth import OAuth2Application from awx.main.models.jobs import JobOptions -# Django -from django.db.models.fields import FieldDoesNotExist - def test_related(): field_lookup = FieldLookupBackend() diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 75a49862a1..4c6597181e 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -7,14 +7,6 @@ import re # noqa import sys from datetime import timedelta -# global settings -from django.conf import global_settings - -# Update this module's local settings from the global settings module. -this_module = sys.modules[__name__] -for setting in dir(global_settings): - if setting == setting.upper(): - setattr(this_module, setting, getattr(global_settings, setting)) # Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
BASE_DIR = os.path.dirname(os.path.dirname(__file__)) @@ -451,7 +443,7 @@ CACHES = {'default': {'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': 'u # Social Auth configuration. SOCIAL_AUTH_STRATEGY = 'social_django.strategy.DjangoStrategy' SOCIAL_AUTH_STORAGE = 'social_django.models.DjangoStorage' -SOCIAL_AUTH_USER_MODEL = AUTH_USER_MODEL # noqa +SOCIAL_AUTH_USER_MODEL = 'auth.User' _SOCIAL_AUTH_PIPELINE_BASE = ( 'social_core.pipeline.social_auth.social_details', From 1e625ed58bde4940e2129e6d43af5eb4f367fdc2 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 3 Feb 2022 17:20:48 -0500 Subject: [PATCH 085/125] Fix up deprecation warning about re_path() replacing urls() --- awx/api/metrics.py | 4 +- awx/api/urls/activity_stream.py | 6 +- awx/api/urls/ad_hoc_command.py | 18 +-- awx/api/urls/ad_hoc_command_event.py | 4 +- awx/api/urls/credential.py | 22 ++-- awx/api/urls/credential_input_source.py | 6 +- awx/api/urls/credential_type.py | 12 +- awx/api/urls/execution_environments.py | 12 +- awx/api/urls/group.py | 26 ++-- awx/api/urls/host.py | 26 ++-- awx/api/urls/instance.py | 12 +- awx/api/urls/instance_group.py | 10 +- awx/api/urls/inventory.py | 38 +++--- awx/api/urls/inventory_source.py | 26 ++-- awx/api/urls/inventory_update.py | 16 +-- awx/api/urls/job.py | 26 ++-- awx/api/urls/job_event.py | 6 +- awx/api/urls/job_host_summary.py | 4 +- awx/api/urls/job_template.py | 38 +++--- awx/api/urls/label.py | 4 +- awx/api/urls/notification.py | 7 +- awx/api/urls/notification_template.py | 12 +- awx/api/urls/oauth2.py | 16 +-- awx/api/urls/oauth2_root.py | 10 +- awx/api/urls/organization.py | 46 +++---- awx/api/urls/project.py | 36 +++--- awx/api/urls/project_update.py | 16 +-- awx/api/urls/role.py | 14 +-- awx/api/urls/schedule.py | 10 +- awx/api/urls/system_job.py | 12 +- awx/api/urls/system_job_template.py | 18 +-- awx/api/urls/team.py | 20 +-- awx/api/urls/urls.py | 134 ++++++++++----------- awx/api/urls/user.py | 30 ++--- awx/api/urls/webhooks.py | 8 +- 
awx/api/urls/workflow_approval.py | 10 +- awx/api/urls/workflow_approval_template.py | 6 +- awx/api/urls/workflow_job.py | 18 +-- awx/api/urls/workflow_job_node.py | 14 +-- awx/api/urls/workflow_job_template.py | 36 +++--- awx/api/urls/workflow_job_template_node.py | 16 +-- awx/conf/urls.py | 8 +- awx/main/routing.py | 6 +- awx/sso/urls.py | 11 +- awx/ui/urls.py | 4 +- awx/urls.py | 25 ++-- 46 files changed, 433 insertions(+), 426 deletions(-) diff --git a/awx/api/metrics.py b/awx/api/metrics.py index 5ed3dcabef..de66c2de33 100644 --- a/awx/api/metrics.py +++ b/awx/api/metrics.py @@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import MetricsView -urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')] +urls = [re_path(r'^$', MetricsView.as_view(), name='metrics_view')] __all__ = ['urls'] diff --git a/awx/api/urls/activity_stream.py b/awx/api/urls/activity_stream.py index 907f856aa8..36b64a2de5 100644 --- a/awx/api/urls/activity_stream.py +++ b/awx/api/urls/activity_stream.py @@ -1,14 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ActivityStreamList, ActivityStreamDetail urls = [ - url(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'), - url(r'^(?P[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'), + re_path(r'^$', ActivityStreamList.as_view(), name='activity_stream_list'), + re_path(r'^(?P[0-9]+)/$', ActivityStreamDetail.as_view(), name='activity_stream_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/ad_hoc_command.py b/awx/api/urls/ad_hoc_command.py index cc1277adcf..9c723e762b 100644 --- a/awx/api/urls/ad_hoc_command.py +++ b/awx/api/urls/ad_hoc_command.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( AdHocCommandList, @@ -16,14 +16,14 @@ from awx.api.views import ( urls = [ - url(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'), - url(r'^(?P[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'), - url(r'^(?P[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'), - url(r'^(?P[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'), - url(r'^(?P[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'), - url(r'^(?P[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'), - url(r'^(?P[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'), + re_path(r'^$', AdHocCommandList.as_view(), name='ad_hoc_command_list'), + re_path(r'^(?P[0-9]+)/$', AdHocCommandDetail.as_view(), name='ad_hoc_command_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', AdHocCommandCancel.as_view(), name='ad_hoc_command_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', AdHocCommandRelaunch.as_view(), name='ad_hoc_command_relaunch'), + re_path(r'^(?P[0-9]+)/events/$', AdHocCommandAdHocCommandEventsList.as_view(), name='ad_hoc_command_ad_hoc_command_events_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', AdHocCommandActivityStreamList.as_view(), name='ad_hoc_command_activity_stream_list'), + re_path(r'^(?P[0-9]+)/notifications/$', AdHocCommandNotificationsList.as_view(), name='ad_hoc_command_notifications_list'), + re_path(r'^(?P[0-9]+)/stdout/$', AdHocCommandStdout.as_view(), name='ad_hoc_command_stdout'), ] __all__ = ['urls'] diff --git a/awx/api/urls/ad_hoc_command_event.py b/awx/api/urls/ad_hoc_command_event.py index a38d4fc165..cbfa72b8bc 100644 --- 
a/awx/api/urls/ad_hoc_command_event.py +++ b/awx/api/urls/ad_hoc_command_event.py @@ -1,13 +1,13 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import AdHocCommandEventDetail urls = [ - url(r'^(?P[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'), + re_path(r'^(?P[0-9]+)/$', AdHocCommandEventDetail.as_view(), name='ad_hoc_command_event_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential.py b/awx/api/urls/credential.py index e041e08477..f2ae8535b9 100644 --- a/awx/api/urls/credential.py +++ b/awx/api/urls/credential.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( CredentialList, @@ -18,16 +18,16 @@ from awx.api.views import ( urls = [ - url(r'^$', CredentialList.as_view(), name='credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'), - url(r'^(?P[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'), - url(r'^(?P[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), - url(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), - url(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), - url(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), - url(r'^(?P[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'), - url(r'^(?P[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'), + re_path(r'^$', CredentialList.as_view(), name='credential_list'), + 
re_path(r'^(?P[0-9]+)/activity_stream/$', CredentialActivityStreamList.as_view(), name='credential_activity_stream_list'), + re_path(r'^(?P[0-9]+)/$', CredentialDetail.as_view(), name='credential_detail'), + re_path(r'^(?P[0-9]+)/access_list/$', CredentialAccessList.as_view(), name='credential_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', CredentialObjectRolesList.as_view(), name='credential_object_roles_list'), + re_path(r'^(?P[0-9]+)/owner_users/$', CredentialOwnerUsersList.as_view(), name='credential_owner_users_list'), + re_path(r'^(?P[0-9]+)/owner_teams/$', CredentialOwnerTeamsList.as_view(), name='credential_owner_teams_list'), + re_path(r'^(?P[0-9]+)/copy/$', CredentialCopy.as_view(), name='credential_copy'), + re_path(r'^(?P[0-9]+)/input_sources/$', CredentialInputSourceSubList.as_view(), name='credential_input_source_sublist'), + re_path(r'^(?P[0-9]+)/test/$', CredentialExternalTest.as_view(), name='credential_external_test'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential_input_source.py b/awx/api/urls/credential_input_source.py index 6b365e5840..7ac4b3c4f4 100644 --- a/awx/api/urls/credential_input_source.py +++ b/awx/api/urls/credential_input_source.py @@ -1,14 +1,14 @@ # Copyright (c) 2019 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList urls = [ - url(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'), - url(r'^(?P[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'), + re_path(r'^$', CredentialInputSourceList.as_view(), name='credential_input_source_list'), + re_path(r'^(?P[0-9]+)/$', CredentialInputSourceDetail.as_view(), name='credential_input_source_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/credential_type.py b/awx/api/urls/credential_type.py index ab799c8c54..8307126351 100644 --- a/awx/api/urls/credential_type.py +++ b/awx/api/urls/credential_type.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest urls = [ - url(r'^$', CredentialTypeList.as_view(), name='credential_type_list'), - url(r'^(?P[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'), - url(r'^(?P[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), name='credential_type_activity_stream_list'), - url(r'^(?P[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'), + re_path(r'^$', CredentialTypeList.as_view(), name='credential_type_list'), + re_path(r'^(?P[0-9]+)/$', CredentialTypeDetail.as_view(), name='credential_type_detail'), + re_path(r'^(?P[0-9]+)/credentials/$', CredentialTypeCredentialList.as_view(), name='credential_type_credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', CredentialTypeActivityStreamList.as_view(), 
name='credential_type_activity_stream_list'), + re_path(r'^(?P[0-9]+)/test/$', CredentialTypeExternalTest.as_view(), name='credential_type_external_test'), ] __all__ = ['urls'] diff --git a/awx/api/urls/execution_environments.py b/awx/api/urls/execution_environments.py index 99b9cb3ddc..2b4f325b1a 100644 --- a/awx/api/urls/execution_environments.py +++ b/awx/api/urls/execution_environments.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ExecutionEnvironmentList, @@ -10,11 +10,11 @@ from awx.api.views import ( urls = [ - url(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), - url(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), - url(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'), - url(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), - url(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), + re_path(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), + re_path(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), + re_path(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'), + re_path(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), + re_path(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/group.py b/awx/api/urls/group.py index 416479def6..18107cd22a 100644 --- a/awx/api/urls/group.py +++ b/awx/api/urls/group.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( GroupList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', GroupList.as_view(), name='group_list'), - url(r'^(?P[0-9]+)/$', GroupDetail.as_view(), name='group_detail'), - url(r'^(?P[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'), - url(r'^(?P[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'), - url(r'^(?P[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'), - url(r'^(?P[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'), - url(r'^(?P[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'), - url(r'^(?P[0-9]+)/job_host_summaries/$', GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'), - url(r'^(?P[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'), - url(r'^(?P[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'), - url(r'^(?P[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'), + re_path(r'^$', GroupList.as_view(), name='group_list'), + re_path(r'^(?P[0-9]+)/$', GroupDetail.as_view(), name='group_detail'), + re_path(r'^(?P[0-9]+)/children/$', GroupChildrenList.as_view(), name='group_children_list'), + re_path(r'^(?P[0-9]+)/hosts/$', GroupHostsList.as_view(), name='group_hosts_list'), + re_path(r'^(?P[0-9]+)/all_hosts/$', GroupAllHostsList.as_view(), name='group_all_hosts_list'), + re_path(r'^(?P[0-9]+)/variable_data/$', GroupVariableData.as_view(), name='group_variable_data'), + re_path(r'^(?P[0-9]+)/job_events/$', GroupJobEventsList.as_view(), name='group_job_events_list'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', 
GroupJobHostSummariesList.as_view(), name='group_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/potential_children/$', GroupPotentialChildrenList.as_view(), name='group_potential_children_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', GroupActivityStreamList.as_view(), name='group_activity_stream_list'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', GroupInventorySourcesList.as_view(), name='group_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', GroupAdHocCommandsList.as_view(), name='group_ad_hoc_commands_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/host.py b/awx/api/urls/host.py index d06608bf86..36bbb70da4 100644 --- a/awx/api/urls/host.py +++ b/awx/api/urls/host.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( HostList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', HostList.as_view(), name='host_list'), - url(r'^(?P[0-9]+)/$', HostDetail.as_view(), name='host_detail'), - url(r'^(?P[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'), - url(r'^(?P[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'), - url(r'^(?P[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'), - url(r'^(?P[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'), - url(r'^(?P[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'), - url(r'^(?P[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'), - url(r'^(?P[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'), - url(r'^(?P[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), 
name='host_ad_hoc_commands_list'), - url(r'^(?P[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'), + re_path(r'^$', HostList.as_view(), name='host_list'), + re_path(r'^(?P[0-9]+)/$', HostDetail.as_view(), name='host_detail'), + re_path(r'^(?P[0-9]+)/variable_data/$', HostVariableData.as_view(), name='host_variable_data'), + re_path(r'^(?P[0-9]+)/groups/$', HostGroupsList.as_view(), name='host_groups_list'), + re_path(r'^(?P[0-9]+)/all_groups/$', HostAllGroupsList.as_view(), name='host_all_groups_list'), + re_path(r'^(?P[0-9]+)/job_events/', HostJobEventsList.as_view(), name='host_job_events_list'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', HostJobHostSummariesList.as_view(), name='host_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', HostActivityStreamList.as_view(), name='host_activity_stream_list'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', HostInventorySourcesList.as_view(), name='host_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/smart_inventories/$', HostSmartInventoriesList.as_view(), name='host_smart_inventories_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', HostAdHocCommandsList.as_view(), name='host_ad_hoc_commands_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_command_events/$', HostAdHocCommandEventsList.as_view(), name='host_ad_hoc_command_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/instance.py b/awx/api/urls/instance.py index dd75db2b21..6c70e285c5 100644 --- a/awx/api/urls/instance.py +++ b/awx/api/urls/instance.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList, InstanceHealthCheck urls = [ - url(r'^$', InstanceList.as_view(), name='instance_list'), - url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), - url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), - url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), - url(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), + re_path(r'^$', InstanceList.as_view(), name='instance_list'), + re_path(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), + re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'), + re_path(r'^(?P<pk>[0-9]+)/health_check/$', InstanceHealthCheck.as_view(), name='instance_health_check'), ] __all__ = ['urls'] diff --git a/awx/api/urls/instance_group.py b/awx/api/urls/instance_group.py index 3bb9eabefc..de8cf8b52a 100644 --- a/awx/api/urls/instance_group.py +++ b/awx/api/urls/instance_group.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList urls = [ - url(r'^$', InstanceGroupList.as_view(), name='instance_group_list'), - url(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'), - url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'), - url(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'), + re_path(r'^$', InstanceGroupList.as_view(), name='instance_group_list'), + re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'), + re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory.py b/awx/api/urls/inventory.py index d323be9450..863591ba60 100644 --- a/awx/api/urls/inventory.py +++ b/awx/api/urls/inventory.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventoryList, @@ -26,24 +26,24 @@ from awx.api.views import ( urls = [ - url(r'^$', InventoryList.as_view(), name='inventory_list'), - url(r'^(?P[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'), - url(r'^(?P[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'), - url(r'^(?P[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'), - url(r'^(?P[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'), - url(r'^(?P[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'), - url(r'^(?P[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), - url(r'^(?P[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), - url(r'^(?P[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), - url(r'^(?P[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), - url(r'^(?P[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), - url(r'^(?P[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), - url(r'^(?P[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'), - url(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), - url(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), - url(r'^(?P[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'), - url(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), + 
re_path(r'^$', InventoryList.as_view(), name='inventory_list'), + re_path(r'^(?P[0-9]+)/$', InventoryDetail.as_view(), name='inventory_detail'), + re_path(r'^(?P[0-9]+)/hosts/$', InventoryHostsList.as_view(), name='inventory_hosts_list'), + re_path(r'^(?P[0-9]+)/groups/$', InventoryGroupsList.as_view(), name='inventory_groups_list'), + re_path(r'^(?P[0-9]+)/root_groups/$', InventoryRootGroupsList.as_view(), name='inventory_root_groups_list'), + re_path(r'^(?P[0-9]+)/variable_data/$', InventoryVariableData.as_view(), name='inventory_variable_data'), + re_path(r'^(?P[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'), + re_path(r'^(?P[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'), + re_path(r'^(?P[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'), + re_path(r'^(?P[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'), + re_path(r'^(?P[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'), + re_path(r'^(?P[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'), + re_path(r'^(?P[0-9]+)/ad_hoc_commands/$', InventoryAdHocCommandsList.as_view(), name='inventory_ad_hoc_commands_list'), + re_path(r'^(?P[0-9]+)/access_list/$', InventoryAccessList.as_view(), name='inventory_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', InventoryObjectRolesList.as_view(), name='inventory_object_roles_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', InventoryInstanceGroupsList.as_view(), name='inventory_instance_groups_list'), + re_path(r'^(?P[0-9]+)/labels/$', InventoryLabelList.as_view(), name='inventory_label_list'), + re_path(r'^(?P[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/inventory_source.py b/awx/api/urls/inventory_source.py index 
02cf30c408..120f5022c6 100644 --- a/awx/api/urls/inventory_source.py +++ b/awx/api/urls/inventory_source.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventorySourceList, @@ -20,26 +20,26 @@ from awx.api.views import ( urls = [ - url(r'^$', InventorySourceList.as_view(), name='inventory_source_list'), - url(r'^(?P[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'), - url(r'^(?P[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'), - url(r'^(?P[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'), - url(r'^(?P[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'), - url(r'^(?P[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'), - url(r'^(?P[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), - url(r'^(?P[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), - url(r'^(?P[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), - url( + re_path(r'^$', InventorySourceList.as_view(), name='inventory_source_list'), + re_path(r'^(?P[0-9]+)/$', InventorySourceDetail.as_view(), name='inventory_source_detail'), + re_path(r'^(?P[0-9]+)/update/$', InventorySourceUpdateView.as_view(), name='inventory_source_update_view'), + re_path(r'^(?P[0-9]+)/inventory_updates/$', InventorySourceUpdatesList.as_view(), name='inventory_source_updates_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', InventorySourceActivityStreamList.as_view(), name='inventory_source_activity_stream_list'), + re_path(r'^(?P[0-9]+)/schedules/$', InventorySourceSchedulesList.as_view(), name='inventory_source_schedules_list'), + 
re_path(r'^(?P[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), + re_path(r'^(?P[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), + re_path(r'^(?P[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(), name='inventory_source_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(), name='inventory_source_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(), name='inventory_source_notification_templates_success_list', diff --git a/awx/api/urls/inventory_update.py b/awx/api/urls/inventory_update.py index 0279f8c915..6855561a72 100644 --- a/awx/api/urls/inventory_update.py +++ b/awx/api/urls/inventory_update.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( InventoryUpdateList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'), - url(r'^(?P[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'), - url(r'^(?P[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'), - url(r'^(?P[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'), - url(r'^(?P[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'), - url(r'^(?P[0-9]+)/credentials/$', InventoryUpdateCredentialsList.as_view(), name='inventory_update_credentials_list'), - url(r'^(?P[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'), + re_path(r'^$', InventoryUpdateList.as_view(), name='inventory_update_list'), + re_path(r'^(?P[0-9]+)/$', InventoryUpdateDetail.as_view(), name='inventory_update_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', InventoryUpdateCancel.as_view(), name='inventory_update_cancel'), + re_path(r'^(?P[0-9]+)/stdout/$', InventoryUpdateStdout.as_view(), name='inventory_update_stdout'), + re_path(r'^(?P[0-9]+)/notifications/$', InventoryUpdateNotificationsList.as_view(), name='inventory_update_notifications_list'), + re_path(r'^(?P[0-9]+)/credentials/$', InventoryUpdateCredentialsList.as_view(), name='inventory_update_credentials_list'), + re_path(r'^(?P[0-9]+)/events/$', InventoryUpdateEventsList.as_view(), name='inventory_update_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job.py b/awx/api/urls/job.py index de45cba9aa..bea61a48a0 100644 --- a/awx/api/urls/job.py +++ b/awx/api/urls/job.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( JobList, @@ -20,18 +20,18 @@ from awx.api.views import ( urls = [ - url(r'^$', JobList.as_view(), name='job_list'), - url(r'^(?P[0-9]+)/$', JobDetail.as_view(), name='job_detail'), - url(r'^(?P[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'), - url(r'^(?P[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'), - url(r'^(?P[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'), - url(r'^(?P[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'), - url(r'^(?P[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'), - url(r'^(?P[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'), - url(r'^(?P[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'), - url(r'^(?P[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'), - url(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'), + re_path(r'^$', JobList.as_view(), name='job_list'), + re_path(r'^(?P[0-9]+)/$', JobDetail.as_view(), name='job_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', JobCancel.as_view(), name='job_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', JobRelaunch.as_view(), name='job_relaunch'), + re_path(r'^(?P[0-9]+)/create_schedule/$', JobCreateSchedule.as_view(), name='job_create_schedule'), + re_path(r'^(?P[0-9]+)/job_host_summaries/$', JobJobHostSummariesList.as_view(), name='job_job_host_summaries_list'), + re_path(r'^(?P[0-9]+)/job_events/$', JobJobEventsList.as_view(), name='job_job_events_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', JobActivityStreamList.as_view(), name='job_activity_stream_list'), + re_path(r'^(?P[0-9]+)/stdout/$', JobStdout.as_view(), name='job_stdout'), + 
re_path(r'^(?P[0-9]+)/notifications/$', JobNotificationsList.as_view(), name='job_notifications_list'), + re_path(r'^(?P[0-9]+)/labels/$', JobLabelList.as_view(), name='job_label_list'), + re_path(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_event.py b/awx/api/urls/job_event.py index 94f3b33929..1a5c681124 100644 --- a/awx/api/urls/job_event.py +++ b/awx/api/urls/job_event.py @@ -1,13 +1,13 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import JobEventDetail, JobEventChildrenList urls = [ - url(r'^(?P[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'), - url(r'^(?P[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'), + re_path(r'^(?P[0-9]+)/$', JobEventDetail.as_view(), name='job_event_detail'), + re_path(r'^(?P[0-9]+)/children/$', JobEventChildrenList.as_view(), name='job_event_children_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/job_host_summary.py b/awx/api/urls/job_host_summary.py index 8252a982d0..c736c59a17 100644 --- a/awx/api/urls/job_host_summary.py +++ b/awx/api/urls/job_host_summary.py @@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import JobHostSummaryDetail -urls = [url(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')] +urls = [re_path(r'^(?P[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')] __all__ = ['urls'] diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py index 967f821fd1..b73be7e913 100644 --- a/awx/api/urls/job_template.py +++ b/awx/api/urls/job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.views import ( JobTemplateList, @@ -25,36 +25,36 @@ from awx.api.views import ( urls = [ - url(r'^$', JobTemplateList.as_view(), name='job_template_list'), - url(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), - url(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), - url(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), - url(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), - url(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), - url(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), - url(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), - url(r'^(?P[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'), - url( + re_path(r'^$', JobTemplateList.as_view(), name='job_template_list'), + re_path(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), + re_path(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), + re_path(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), + re_path(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), + re_path(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), + re_path(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), + re_path(r'^(?P[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), 
name='job_template_activity_stream_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(), name='job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(), name='job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(), name='job_template_notification_templates_success_list', ), - url(r'^(?P[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), - url(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), - url(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), - url(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), - url(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}), + re_path(r'^(?P[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), + re_path(r'^(?P[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), + re_path(r'^(?P[0-9]+)/labels/$', JobTemplateLabelList.as_view(), name='job_template_label_list'), + re_path(r'^(?P[0-9]+)/copy/$', JobTemplateCopy.as_view(), name='job_template_copy'), + re_path(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'job_templates'}), ] __all__ = ['urls'] diff --git a/awx/api/urls/label.py b/awx/api/urls/label.py index 368fca7aa8..5fc0a4f629 100644 --- a/awx/api/urls/label.py +++ b/awx/api/urls/label.py 
@@ -1,11 +1,11 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import LabelList, LabelDetail -urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')] +urls = [re_path(r'^$', LabelList.as_view(), name='label_list'), re_path(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')] __all__ = ['urls'] diff --git a/awx/api/urls/notification.py b/awx/api/urls/notification.py index 960318ee15..2ed27e7c3d 100644 --- a/awx/api/urls/notification.py +++ b/awx/api/urls/notification.py @@ -1,11 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import NotificationList, NotificationDetail -urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')] +urls = [ + re_path(r'^$', NotificationList.as_view(), name='notification_list'), + re_path(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'), +] __all__ = ['urls'] diff --git a/awx/api/urls/notification_template.py b/awx/api/urls/notification_template.py index 8473878922..764c571868 100644 --- a/awx/api/urls/notification_template.py +++ b/awx/api/urls/notification_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( NotificationTemplateList, @@ -13,11 +13,11 @@ from awx.api.views import ( urls = [ - url(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'), - url(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), - url(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), - url(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), - url(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), + re_path(r'^$', NotificationTemplateList.as_view(), name='notification_template_list'), + re_path(r'^(?P[0-9]+)/$', NotificationTemplateDetail.as_view(), name='notification_template_detail'), + re_path(r'^(?P[0-9]+)/test/$', NotificationTemplateTest.as_view(), name='notification_template_test'), + re_path(r'^(?P[0-9]+)/notifications/$', NotificationTemplateNotificationList.as_view(), name='notification_template_notification_list'), + re_path(r'^(?P[0-9]+)/copy/$', NotificationTemplateCopy.as_view(), name='notification_template_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/oauth2.py b/awx/api/urls/oauth2.py index 720ba2416f..f613b34a0b 100644 --- a/awx/api/urls/oauth2.py +++ b/awx/api/urls/oauth2.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( OAuth2ApplicationList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), - url(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'), - url(r'^applications/(?P[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'), - url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), - url(r'^tokens/(?P[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'), - url(r'^tokens/(?P[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'), + re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), + re_path(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'), + re_path(r'^applications/(?P[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'), + re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^tokens/(?P[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'), + re_path(r'^tokens/(?P[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/oauth2_root.py b/awx/api/urls/oauth2_root.py index 61e1e15850..d15d14825e 100644 --- a/awx/api/urls/oauth2_root.py +++ b/awx/api/urls/oauth2_root.py @@ -4,7 +4,7 @@ from datetime import timedelta from 
django.utils.timezone import now from django.conf import settings -from django.conf.urls import url +from django.urls import re_path from oauthlib import oauth2 from oauth2_provider import views @@ -35,10 +35,10 @@ class TokenView(views.TokenView): urls = [ - url(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'), - url(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"), - url(r"^token/$", TokenView.as_view(), name="token"), - url(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"), + re_path(r'^$', ApiOAuthAuthorizationRootView.as_view(), name='oauth_authorization_root_view'), + re_path(r"^authorize/$", views.AuthorizationView.as_view(), name="authorize"), + re_path(r"^token/$", TokenView.as_view(), name="token"), + re_path(r"^revoke_token/$", views.RevokeTokenView.as_view(), name="revoke-token"), ] diff --git a/awx/api/urls/organization.py b/awx/api/urls/organization.py index 9eac94da48..c841a53181 100644 --- a/awx/api/urls/organization.py +++ b/awx/api/urls/organization.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( OrganizationList, @@ -30,44 +30,44 @@ from awx.api.views import ( urls = [ - url(r'^$', OrganizationList.as_view(), name='organization_list'), - url(r'^(?P[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'), - url(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), - url(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), - url(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), - url(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), - url(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), - url(r'^(?P[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), - url(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), - url(r'^(?P[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'), - url(r'^(?P[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), - url(r'^(?P[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), - url(r'^(?P[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), - url( + re_path(r'^$', OrganizationList.as_view(), name='organization_list'), + re_path(r'^(?P[0-9]+)/$', OrganizationDetail.as_view(), name='organization_detail'), + re_path(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), + re_path(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), + 
re_path(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), + re_path(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), + re_path(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), + re_path(r'^(?P[0-9]+)/job_templates/$', OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), + re_path(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), + re_path(r'^(?P[0-9]+)/teams/$', OrganizationTeamsList.as_view(), name='organization_teams_list'), + re_path(r'^(?P[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), + re_path(r'^(?P[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(), name='organization_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(), name='organization_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(), name='organization_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(), name='organization_notification_templates_approvals_list', ), - url(r'^(?P[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), - 
url(r'^(?P[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), - url(r'^(?P[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), - url(r'^(?P[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), - url(r'^(?P[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'), + re_path(r'^(?P[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), + re_path(r'^(?P[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), + re_path(r'^(?P[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'), + re_path(r'^(?P[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/project.py b/awx/api/urls/project.py index ea356a651b..0ce6cacecb 100644 --- a/awx/api/urls/project.py +++ b/awx/api/urls/project.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ProjectList, @@ -24,30 +24,32 @@ from awx.api.views import ( urls = [ - url(r'^$', ProjectList.as_view(), name='project_list'), - url(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), - url(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), - url(r'^(?P[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'), - url(r'^(?P[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'), - url(r'^(?P[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'), - url(r'^(?P[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'), - url(r'^(?P[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'), - url(r'^(?P[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), - url(r'^(?P[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), - url(r'^(?P[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'), - url( + re_path(r'^$', ProjectList.as_view(), name='project_list'), + re_path(r'^(?P[0-9]+)/$', ProjectDetail.as_view(), name='project_detail'), + re_path(r'^(?P[0-9]+)/playbooks/$', ProjectPlaybooks.as_view(), name='project_playbooks'), + re_path(r'^(?P[0-9]+)/inventories/$', ProjectInventories.as_view(), name='project_inventories'), + re_path(r'^(?P[0-9]+)/scm_inventory_sources/$', ProjectScmInventorySources.as_view(), name='project_scm_inventory_sources'), + re_path(r'^(?P[0-9]+)/teams/$', ProjectTeamsList.as_view(), name='project_teams_list'), + re_path(r'^(?P[0-9]+)/update/$', ProjectUpdateView.as_view(), name='project_update_view'), + re_path(r'^(?P[0-9]+)/project_updates/$', ProjectUpdatesList.as_view(), name='project_updates_list'), + 
re_path(r'^(?P[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), + re_path(r'^(?P[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), + re_path( + r'^(?P[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list' + ), + re_path( r'^(?P[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(), name='project_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(), name='project_notification_templates_started_list', ), - url(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), - url(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), - url(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), + re_path(r'^(?P[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), + re_path(r'^(?P[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), + re_path(r'^(?P[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), ] __all__ = ['urls'] diff --git a/awx/api/urls/project_update.py b/awx/api/urls/project_update.py index 03356602ca..fc3e2d2d52 100644 --- a/awx/api/urls/project_update.py +++ b/awx/api/urls/project_update.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( ProjectUpdateList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', ProjectUpdateList.as_view(), name='project_update_list'), - url(r'^(?P[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'), - url(r'^(?P[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'), - url(r'^(?P[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'), - url(r'^(?P[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'), - url(r'^(?P[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'), - url(r'^(?P[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'), + re_path(r'^$', ProjectUpdateList.as_view(), name='project_update_list'), + re_path(r'^(?P[0-9]+)/$', ProjectUpdateDetail.as_view(), name='project_update_detail'), + re_path(r'^(?P[0-9]+)/cancel/$', ProjectUpdateCancel.as_view(), name='project_update_cancel'), + re_path(r'^(?P[0-9]+)/stdout/$', ProjectUpdateStdout.as_view(), name='project_update_stdout'), + re_path(r'^(?P[0-9]+)/scm_inventory_updates/$', ProjectUpdateScmInventoryUpdates.as_view(), name='project_update_scm_inventory_updates'), + re_path(r'^(?P[0-9]+)/notifications/$', ProjectUpdateNotificationsList.as_view(), name='project_update_notifications_list'), + re_path(r'^(?P[0-9]+)/events/$', ProjectUpdateEventsList.as_view(), name='project_update_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/role.py b/awx/api/urls/role.py index 2b6aed19b5..0ee306ef0c 100644 --- a/awx/api/urls/role.py +++ b/awx/api/urls/role.py @@ -1,18 +1,18 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList urls = [ - url(r'^$', RoleList.as_view(), name='role_list'), - url(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'), - url(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'), - url(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'), - url(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'), - url(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'), + re_path(r'^$', RoleList.as_view(), name='role_list'), + re_path(r'^(?P<pk>[0-9]+)/$', RoleDetail.as_view(), name='role_detail'), + re_path(r'^(?P<pk>[0-9]+)/users/$', RoleUsersList.as_view(), name='role_users_list'), + re_path(r'^(?P<pk>[0-9]+)/teams/$', RoleTeamsList.as_view(), name='role_teams_list'), + re_path(r'^(?P<pk>[0-9]+)/parents/$', RoleParentsList.as_view(), name='role_parents_list'), + re_path(r'^(?P<pk>[0-9]+)/children/$', RoleChildrenList.as_view(), name='role_children_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/schedule.py b/awx/api/urls/schedule.py index c3c91f7ae0..87907eda8f 100644 --- a/awx/api/urls/schedule.py +++ b/awx/api/urls/schedule.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList urls = [ - url(r'^$', ScheduleList.as_view(), name='schedule_list'), - url(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'), - url(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'), - url(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'), + re_path(r'^$', ScheduleList.as_view(), name='schedule_list'), + re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'), + re_path(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'), + re_path(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/system_job.py b/awx/api/urls/system_job.py index 8b060a2d85..891a697006 100644 --- a/awx/api/urls/system_job.py +++ b/awx/api/urls/system_job.py @@ -1,17 +1,17 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList urls = [ - url(r'^$', SystemJobList.as_view(), name='system_job_list'), - url(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'), - url(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'), - url(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'), - url(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'), + re_path(r'^$', SystemJobList.as_view(), name='system_job_list'), + re_path(r'^(?P<pk>[0-9]+)/$', SystemJobDetail.as_view(), name='system_job_detail'), + re_path(r'^(?P<pk>[0-9]+)/cancel/$', SystemJobCancel.as_view(), name='system_job_cancel'), + re_path(r'^(?P<pk>[0-9]+)/notifications/$', SystemJobNotificationsList.as_view(), name='system_job_notifications_list'), + re_path(r'^(?P<pk>[0-9]+)/events/$', SystemJobEventsList.as_view(), name='system_job_events_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/system_job_template.py b/awx/api/urls/system_job_template.py index 532d35d97a..e0b68d3135 100644 --- a/awx/api/urls/system_job_template.py +++ b/awx/api/urls/system_job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( SystemJobTemplateList, @@ -16,22 +16,22 @@ from awx.api.views import ( urls = [ - url(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'), - url(r'^(?P[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'), - url(r'^(?P[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), - url(r'^(?P[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), - url(r'^(?P[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), - url( + re_path(r'^$', SystemJobTemplateList.as_view(), name='system_job_template_list'), + re_path(r'^(?P[0-9]+)/$', SystemJobTemplateDetail.as_view(), name='system_job_template_detail'), + re_path(r'^(?P[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), + re_path(r'^(?P[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(), name='system_job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(), name='system_job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(), name='system_job_template_notification_templates_success_list', diff --git a/awx/api/urls/team.py b/awx/api/urls/team.py index 185c86e42a..311a2e8009 100644 --- a/awx/api/urls/team.py +++ b/awx/api/urls/team.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( TeamList, @@ -17,15 +17,15 @@ from awx.api.views import ( urls = [ - url(r'^$', TeamList.as_view(), name='team_list'), - url(r'^(?P[0-9]+)/$', TeamDetail.as_view(), name='team_detail'), - url(r'^(?P[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'), - url(r'^(?P[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'), - url(r'^(?P[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'), - url(r'^(?P[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'), - url(r'^(?P[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'), - url(r'^(?P[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'), - url(r'^(?P[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'), + re_path(r'^$', TeamList.as_view(), name='team_list'), + re_path(r'^(?P[0-9]+)/$', TeamDetail.as_view(), name='team_detail'), + re_path(r'^(?P[0-9]+)/projects/$', TeamProjectsList.as_view(), name='team_projects_list'), + re_path(r'^(?P[0-9]+)/users/$', TeamUsersList.as_view(), name='team_users_list'), + re_path(r'^(?P[0-9]+)/credentials/$', TeamCredentialsList.as_view(), name='team_credentials_list'), + re_path(r'^(?P[0-9]+)/roles/$', TeamRolesList.as_view(), name='team_roles_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', TeamObjectRolesList.as_view(), name='team_object_roles_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', TeamActivityStreamList.as_view(), name='team_activity_stream_list'), + re_path(r'^(?P[0-9]+)/access_list/$', TeamAccessList.as_view(), name='team_access_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py index 017fa307aa..c092696d24 100644 --- a/awx/api/urls/urls.py +++ b/awx/api/urls/urls.py @@ -3,7 +3,7 @@ from __future__ import absolute_import, unicode_literals from django.conf import settings 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.generics import LoggedLoginView, LoggedLogoutView from awx.api.views import ( @@ -74,78 +74,78 @@ from .workflow_approval import urls as workflow_approval_urls v2_urls = [ - url(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), - url(r'^credential_types/', include(credential_type_urls)), - url(r'^credential_input_sources/', include(credential_input_source_urls)), - url(r'^hosts/(?P[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'), - url(r'^jobs/(?P[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'), - url(r'^job_templates/(?P[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), - url(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), - url(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), - url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), - url(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'), - url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), - url(r'^', include(oauth2_urls)), - url(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), - url(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), - url(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), - url(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), - url(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), - url(r'^auth/$', AuthView.as_view()), - url(r'^me/$', UserMeList.as_view(), name='user_me_list'), - url(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), - 
url(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), - url(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'), - url(r'^settings/', include('awx.conf.urls')), - url(r'^instances/', include(instance_urls)), - url(r'^instance_groups/', include(instance_group_urls)), - url(r'^schedules/', include(schedule_urls)), - url(r'^organizations/', include(organization_urls)), - url(r'^users/', include(user_urls)), - url(r'^execution_environments/', include(execution_environment_urls)), - url(r'^projects/', include(project_urls)), - url(r'^project_updates/', include(project_update_urls)), - url(r'^teams/', include(team_urls)), - url(r'^inventories/', include(inventory_urls)), - url(r'^hosts/', include(host_urls)), - url(r'^groups/', include(group_urls)), - url(r'^inventory_sources/', include(inventory_source_urls)), - url(r'^inventory_updates/', include(inventory_update_urls)), - url(r'^credentials/', include(credential_urls)), - url(r'^roles/', include(role_urls)), - url(r'^job_templates/', include(job_template_urls)), - url(r'^jobs/', include(job_urls)), - url(r'^job_host_summaries/', include(job_host_summary_urls)), - url(r'^job_events/', include(job_event_urls)), - url(r'^ad_hoc_commands/', include(ad_hoc_command_urls)), - url(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), - url(r'^system_job_templates/', include(system_job_template_urls)), - url(r'^system_jobs/', include(system_job_urls)), - url(r'^notification_templates/', include(notification_template_urls)), - url(r'^notifications/', include(notification_urls)), - url(r'^workflow_job_templates/', include(workflow_job_template_urls)), - url(r'^workflow_jobs/', include(workflow_job_urls)), - url(r'^labels/', include(label_urls)), - url(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), - url(r'^workflow_job_nodes/', include(workflow_job_node_urls)), - url(r'^unified_job_templates/$', 
UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), - url(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), - url(r'^activity_stream/', include(activity_stream_urls)), - url(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), - url(r'^workflow_approvals/', include(workflow_approval_urls)), + re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'), + re_path(r'^credential_types/', include(credential_type_urls)), + re_path(r'^credential_input_sources/', include(credential_input_source_urls)), + re_path(r'^hosts/(?P[0-9]+)/ansible_facts/$', HostAnsibleFactsDetail.as_view(), name='host_ansible_facts_detail'), + re_path(r'^jobs/(?P[0-9]+)/credentials/$', JobCredentialsList.as_view(), name='job_credentials_list'), + re_path(r'^job_templates/(?P[0-9]+)/credentials/$', JobTemplateCredentialsList.as_view(), name='job_template_credentials_list'), + re_path(r'^schedules/preview/$', SchedulePreview.as_view(), name='schedule_rrule'), + re_path(r'^schedules/zoneinfo/$', ScheduleZoneInfo.as_view(), name='schedule_zoneinfo'), + re_path(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^applications/(?P[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'), + re_path(r'^applications/(?P[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='application_o_auth2_token_list'), + re_path(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^', include(oauth2_urls)), + re_path(r'^metrics/$', MetricsView.as_view(), name='metrics_view'), + re_path(r'^ping/$', ApiV2PingView.as_view(), name='api_v2_ping_view'), + re_path(r'^config/$', ApiV2ConfigView.as_view(), name='api_v2_config_view'), + re_path(r'^config/subscriptions/$', ApiV2SubscriptionView.as_view(), name='api_v2_subscription_view'), + re_path(r'^config/attach/$', ApiV2AttachView.as_view(), name='api_v2_attach_view'), + re_path(r'^auth/$', 
AuthView.as_view()), + re_path(r'^me/$', UserMeList.as_view(), name='user_me_list'), + re_path(r'^dashboard/$', DashboardView.as_view(), name='dashboard_view'), + re_path(r'^dashboard/graphs/jobs/$', DashboardJobsGraphView.as_view(), name='dashboard_jobs_graph_view'), + re_path(r'^mesh_visualizer/', MeshVisualizer.as_view(), name='mesh_visualizer_view'), + re_path(r'^settings/', include('awx.conf.urls')), + re_path(r'^instances/', include(instance_urls)), + re_path(r'^instance_groups/', include(instance_group_urls)), + re_path(r'^schedules/', include(schedule_urls)), + re_path(r'^organizations/', include(organization_urls)), + re_path(r'^users/', include(user_urls)), + re_path(r'^execution_environments/', include(execution_environment_urls)), + re_path(r'^projects/', include(project_urls)), + re_path(r'^project_updates/', include(project_update_urls)), + re_path(r'^teams/', include(team_urls)), + re_path(r'^inventories/', include(inventory_urls)), + re_path(r'^hosts/', include(host_urls)), + re_path(r'^groups/', include(group_urls)), + re_path(r'^inventory_sources/', include(inventory_source_urls)), + re_path(r'^inventory_updates/', include(inventory_update_urls)), + re_path(r'^credentials/', include(credential_urls)), + re_path(r'^roles/', include(role_urls)), + re_path(r'^job_templates/', include(job_template_urls)), + re_path(r'^jobs/', include(job_urls)), + re_path(r'^job_host_summaries/', include(job_host_summary_urls)), + re_path(r'^job_events/', include(job_event_urls)), + re_path(r'^ad_hoc_commands/', include(ad_hoc_command_urls)), + re_path(r'^ad_hoc_command_events/', include(ad_hoc_command_event_urls)), + re_path(r'^system_job_templates/', include(system_job_template_urls)), + re_path(r'^system_jobs/', include(system_job_urls)), + re_path(r'^notification_templates/', include(notification_template_urls)), + re_path(r'^notifications/', include(notification_urls)), + re_path(r'^workflow_job_templates/', include(workflow_job_template_urls)), + 
re_path(r'^workflow_jobs/', include(workflow_job_urls)), + re_path(r'^labels/', include(label_urls)), + re_path(r'^workflow_job_template_nodes/', include(workflow_job_template_node_urls)), + re_path(r'^workflow_job_nodes/', include(workflow_job_node_urls)), + re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'), + re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'), + re_path(r'^activity_stream/', include(activity_stream_urls)), + re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)), + re_path(r'^workflow_approvals/', include(workflow_approval_urls)), ] app_name = 'api' urlpatterns = [ - url(r'^$', ApiRootView.as_view(), name='api_root_view'), - url(r'^(?P(v2))/', include(v2_urls)), - url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), - url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), - url(r'^o/', include(oauth2_root_urls)), + re_path(r'^$', ApiRootView.as_view(), name='api_root_view'), + re_path(r'^(?P(v2))/', include(v2_urls)), + re_path(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'), + re_path(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'), + re_path(r'^o/', include(oauth2_root_urls)), ] if settings.SETTINGS_MODULE == 'awx.settings.development': from awx.api.swagger import SwaggerSchemaView - urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')] + urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')] diff --git a/awx/api/urls/user.py b/awx/api/urls/user.py index 340c428ba5..39bc07aec4 100644 --- a/awx/api/urls/user.py +++ b/awx/api/urls/user.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, 
Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( UserList, @@ -21,20 +21,20 @@ from awx.api.views import ( ) urls = [ - url(r'^$', UserList.as_view(), name='user_list'), - url(r'^(?P[0-9]+)/$', UserDetail.as_view(), name='user_detail'), - url(r'^(?P[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), - url(r'^(?P[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'), - url(r'^(?P[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'), - url(r'^(?P[0-9]+)/projects/$', UserProjectsList.as_view(), name='user_projects_list'), - url(r'^(?P[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'), - url(r'^(?P[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), - url(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), - url(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), - url(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), - url(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), - url(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), - url(r'^(?P[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), + re_path(r'^$', UserList.as_view(), name='user_list'), + re_path(r'^(?P[0-9]+)/$', UserDetail.as_view(), name='user_detail'), + re_path(r'^(?P[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), + re_path(r'^(?P[0-9]+)/organizations/$', UserOrganizationsList.as_view(), name='user_organizations_list'), + re_path(r'^(?P[0-9]+)/admin_of_organizations/$', UserAdminOfOrganizationsList.as_view(), name='user_admin_of_organizations_list'), + re_path(r'^(?P[0-9]+)/projects/$', 
UserProjectsList.as_view(), name='user_projects_list'), + re_path(r'^(?P[0-9]+)/credentials/$', UserCredentialsList.as_view(), name='user_credentials_list'), + re_path(r'^(?P[0-9]+)/roles/$', UserRolesList.as_view(), name='user_roles_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', UserActivityStreamList.as_view(), name='user_activity_stream_list'), + re_path(r'^(?P[0-9]+)/access_list/$', UserAccessList.as_view(), name='user_access_list'), + re_path(r'^(?P[0-9]+)/applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), + re_path(r'^(?P[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), + re_path(r'^(?P[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), + re_path(r'^(?P[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/webhooks.py b/awx/api/urls/webhooks.py index f6739a5df9..764e3dd6e2 100644 --- a/awx/api/urls/webhooks.py +++ b/awx/api/urls/webhooks.py @@ -1,10 +1,10 @@ -from django.conf.urls import url +from django.urls import re_path from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver urlpatterns = [ - url(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), - url(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), - url(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), + re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'), + re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'), + re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'), ] diff --git a/awx/api/urls/workflow_approval.py b/awx/api/urls/workflow_approval.py index a3c6454af1..640528edbd 100644 --- a/awx/api/urls/workflow_approval.py +++ b/awx/api/urls/workflow_approval.py @@ -1,16 +1,16 @@ # Copyright (c) 2017 Ansible, 
Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny urls = [ - url(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'), - url(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'), - url(r'^(?P<pk>[0-9]+)/approve/$', WorkflowApprovalApprove.as_view(), name='workflow_approval_approve'), - url(r'^(?P<pk>[0-9]+)/deny/$', WorkflowApprovalDeny.as_view(), name='workflow_approval_deny'), + re_path(r'^$', WorkflowApprovalList.as_view(), name='workflow_approval_list'), + re_path(r'^(?P<pk>[0-9]+)/$', WorkflowApprovalDetail.as_view(), name='workflow_approval_detail'), + re_path(r'^(?P<pk>[0-9]+)/approve/$', WorkflowApprovalApprove.as_view(), name='workflow_approval_approve'), + re_path(r'^(?P<pk>[0-9]+)/deny/$', WorkflowApprovalDeny.as_view(), name='workflow_approval_deny'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_approval_template.py b/awx/api/urls/workflow_approval_template.py index f49929b283..811ad351d2 100644 --- a/awx/api/urls/workflow_approval_template.py +++ b/awx/api/urls/workflow_approval_template.py @@ -1,14 +1,14 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved.
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList urls = [ - url(r'^(?P[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'), - url(r'^(?P[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowApprovalTemplateDetail.as_view(), name='workflow_approval_template_detail'), + re_path(r'^(?P[0-9]+)/approvals/$', WorkflowApprovalTemplateJobsList.as_view(), name='workflow_approval_template_jobs_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job.py b/awx/api/urls/workflow_job.py index 1ecbb39373..707b7080f9 100644 --- a/awx/api/urls/workflow_job.py +++ b/awx/api/urls/workflow_job.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobList, @@ -16,14 +16,14 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobDetail.as_view(), name='workflow_job_detail'), - url(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'), - url(r'^(?P[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'), - url(r'^(?P[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'), - url(r'^(?P[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'), - url(r'^(?P[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'), - url(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'), + re_path(r'^$', WorkflowJobList.as_view(), name='workflow_job_list'), + re_path(r'^(?P[0-9]+)/$', 
WorkflowJobDetail.as_view(), name='workflow_job_detail'), + re_path(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobWorkflowNodesList.as_view(), name='workflow_job_workflow_nodes_list'), + re_path(r'^(?P[0-9]+)/labels/$', WorkflowJobLabelList.as_view(), name='workflow_job_label_list'), + re_path(r'^(?P[0-9]+)/cancel/$', WorkflowJobCancel.as_view(), name='workflow_job_cancel'), + re_path(r'^(?P[0-9]+)/relaunch/$', WorkflowJobRelaunch.as_view(), name='workflow_job_relaunch'), + re_path(r'^(?P[0-9]+)/notifications/$', WorkflowJobNotificationsList.as_view(), name='workflow_job_notifications_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobActivityStreamList.as_view(), name='workflow_job_activity_stream_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_node.py b/awx/api/urls/workflow_job_node.py index 809ee515f0..5b246c95b4 100644 --- a/awx/api/urls/workflow_job_node.py +++ b/awx/api/urls/workflow_job_node.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobNodeList, @@ -14,12 +14,12 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'), - url(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'), - url(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'), - url(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'), - url(r'^(?P[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'), + re_path(r'^$', WorkflowJobNodeList.as_view(), name='workflow_job_node_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowJobNodeDetail.as_view(), name='workflow_job_node_detail'), + re_path(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobNodeSuccessNodesList.as_view(), name='workflow_job_node_success_nodes_list'), + re_path(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'), + re_path(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'), + re_path(r'^(?P[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_template.py b/awx/api/urls/workflow_job_template.py index 90b3c043fc..e2729186ca 100644 --- a/awx/api/urls/workflow_job_template.py +++ b/awx/api/urls/workflow_job_template.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import include, url +from django.urls import include, re_path from awx.api.views import ( WorkflowJobTemplateList, @@ -24,39 +24,39 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'), - url(r'^(?P[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'), - url(r'^(?P[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'), - url(r'^(?P[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'), - url(r'^(?P[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'), - url(r'^(?P[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'), - url(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), - url(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), - url( + re_path(r'^$', WorkflowJobTemplateList.as_view(), name='workflow_job_template_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowJobTemplateDetail.as_view(), name='workflow_job_template_detail'), + re_path(r'^(?P[0-9]+)/workflow_jobs/$', WorkflowJobTemplateJobsList.as_view(), name='workflow_job_template_jobs_list'), + re_path(r'^(?P[0-9]+)/launch/$', WorkflowJobTemplateLaunch.as_view(), name='workflow_job_template_launch'), + re_path(r'^(?P[0-9]+)/copy/$', WorkflowJobTemplateCopy.as_view(), name='workflow_job_template_copy'), + re_path(r'^(?P[0-9]+)/schedules/$', WorkflowJobTemplateSchedulesList.as_view(), name='workflow_job_template_schedules_list'), + re_path(r'^(?P[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), 
name='workflow_job_template_survey_spec'), + re_path(r'^(?P[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), + re_path(r'^(?P[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), + re_path( r'^(?P[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(), name='workflow_job_template_notification_templates_started_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(), name='workflow_job_template_notification_templates_error_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(), name='workflow_job_template_notification_templates_success_list', ), - url( + re_path( r'^(?P[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(), name='workflow_job_template_notification_templates_approvals_list', ), - url(r'^(?P[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), - url(r'^(?P[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), - url(r'^(?P[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), - url(r'^(?P[0-9]+)/', include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}), + re_path(r'^(?P[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), + re_path(r'^(?P[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), + re_path(r'^(?P[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), + re_path(r'^(?P[0-9]+)/', 
include('awx.api.urls.webhooks'), {'model_kwarg': 'workflow_job_templates'}), ] __all__ = ['urls'] diff --git a/awx/api/urls/workflow_job_template_node.py b/awx/api/urls/workflow_job_template_node.py index 868c728a88..bcd61aed67 100644 --- a/awx/api/urls/workflow_job_template_node.py +++ b/awx/api/urls/workflow_job_template_node.py @@ -1,7 +1,7 @@ # Copyright (c) 2017 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path from awx.api.views import ( WorkflowJobTemplateNodeList, @@ -15,13 +15,13 @@ from awx.api.views import ( urls = [ - url(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'), - url(r'^(?P[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'), - url(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'), - url(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'), - url(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'), - url(r'^(?P[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'), - url(r'^(?P[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'), + re_path(r'^$', WorkflowJobTemplateNodeList.as_view(), name='workflow_job_template_node_list'), + re_path(r'^(?P[0-9]+)/$', WorkflowJobTemplateNodeDetail.as_view(), name='workflow_job_template_node_detail'), + re_path(r'^(?P[0-9]+)/success_nodes/$', WorkflowJobTemplateNodeSuccessNodesList.as_view(), name='workflow_job_template_node_success_nodes_list'), + re_path(r'^(?P[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), 
name='workflow_job_template_node_failure_nodes_list'), + re_path(r'^(?P[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'), + re_path(r'^(?P[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'), + re_path(r'^(?P[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'), ] __all__ = ['urls'] diff --git a/awx/conf/urls.py b/awx/conf/urls.py index 61134d20b8..69d47b6afc 100644 --- a/awx/conf/urls.py +++ b/awx/conf/urls.py @@ -1,13 +1,13 @@ # Copyright (c) 2016 Ansible, Inc. # All Rights Reserved. +from django.urls import re_path -from django.conf.urls import url from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest urlpatterns = [ - url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), - url(r'^(?P[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), - url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), + re_path(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), + re_path(r'^(?P[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), + re_path(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), ] diff --git a/awx/main/routing.py b/awx/main/routing.py index f470541443..2818559428 100644 --- a/awx/main/routing.py +++ b/awx/main/routing.py @@ -1,8 +1,8 @@ import redis import logging -from django.conf.urls import url from django.conf import settings +from django.urls import re_path from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter @@ -27,8 +27,8 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter): websocket_urlpatterns = [ - url(r'websocket/$', consumers.EventConsumer), - url(r'websocket/broadcast/$', 
consumers.BroadcastConsumer), + re_path(r'websocket/$', consumers.EventConsumer), + re_path(r'websocket/broadcast/$', consumers.BroadcastConsumer), ] application = AWXProtocolTypeRouter( diff --git a/awx/sso/urls.py b/awx/sso/urls.py index a32b11d6d6..93da0996c9 100644 --- a/awx/sso/urls.py +++ b/awx/sso/urls.py @@ -1,14 +1,15 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. -from django.conf.urls import url +from django.urls import re_path + from awx.sso.views import sso_complete, sso_error, sso_inactive, saml_metadata app_name = 'sso' urlpatterns = [ - url(r'^complete/$', sso_complete, name='sso_complete'), - url(r'^error/$', sso_error, name='sso_error'), - url(r'^inactive/$', sso_inactive, name='sso_inactive'), - url(r'^metadata/saml/$', saml_metadata, name='saml_metadata'), + re_path(r'^complete/$', sso_complete, name='sso_complete'), + re_path(r'^error/$', sso_error, name='sso_error'), + re_path(r'^inactive/$', sso_inactive, name='sso_inactive'), + re_path(r'^metadata/saml/$', saml_metadata, name='saml_metadata'), ] diff --git a/awx/ui/urls.py b/awx/ui/urls.py index 068f2a020a..6661fee280 100644 --- a/awx/ui/urls.py +++ b/awx/ui/urls.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import re_path from django.utils.translation import gettext_lazy as _ from django.views.generic.base import TemplateView @@ -27,4 +27,4 @@ class MigrationsNotran(TemplateView): app_name = 'ui' -urlpatterns = [url(r'^$', IndexView.as_view(), name='index'), url(r'^migrations_notran/$', MigrationsNotran.as_view(), name='migrations_notran')] +urlpatterns = [re_path(r'^$', IndexView.as_view(), name='index'), re_path(r'^migrations_notran/$', MigrationsNotran.as_view(), name='migrations_notran')] diff --git a/awx/urls.py b/awx/urls.py index 4424e8328a..c99eda011c 100644 --- a/awx/urls.py +++ b/awx/urls.py @@ -1,29 +1,30 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
-from django.conf.urls import url, include from django.conf import settings +from django.urls import re_path, include + from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect urlpatterns = [ - url(r'', include('awx.ui.urls', namespace='ui')), - url(r'^api/', include('awx.api.urls', namespace='api')), - url(r'^sso/', include('awx.sso.urls', namespace='sso')), - url(r'^sso/', include('social_django.urls', namespace='social')), - url(r'^(?:api/)?400.html$', handle_400), - url(r'^(?:api/)?403.html$', handle_403), - url(r'^(?:api/)?404.html$', handle_404), - url(r'^(?:api/)?500.html$', handle_500), - url(r'^csp-violation/', handle_csp_violation), - url(r'^login/', handle_login_redirect), + re_path(r'', include('awx.ui.urls', namespace='ui')), + re_path(r'^api/', include('awx.api.urls', namespace='api')), + re_path(r'^sso/', include('awx.sso.urls', namespace='sso')), + re_path(r'^sso/', include('social_django.urls', namespace='social')), + re_path(r'^(?:api/)?400.html$', handle_400), + re_path(r'^(?:api/)?403.html$', handle_403), + re_path(r'^(?:api/)?404.html$', handle_404), + re_path(r'^(?:api/)?500.html$', handle_500), + re_path(r'^csp-violation/', handle_csp_violation), + re_path(r'^login/', handle_login_redirect), ] if settings.SETTINGS_MODULE == 'awx.settings.development': try: import debug_toolbar - urlpatterns += [url(r'^__debug__/', include(debug_toolbar.urls))] + urlpatterns += [re_path(r'^__debug__/', include(debug_toolbar.urls))] except ImportError: pass From 0500512c3c6be1c808b6092f6b5d24e35a269d7a Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 7 Feb 2022 10:59:52 -0500 Subject: [PATCH 086/125] Fix up deprecation warning about get_response for middleware It will no longer be allowed to be None by Django 4.0 --- awx/main/middleware.py | 2 +- awx/main/tests/functional/api/test_user.py | 15 ++++---- awx/main/tests/functional/test_named_url.py | 4 +- 
awx/main/tests/functional/test_session.py | 42 --------------------- 4 files changed, 12 insertions(+), 51 deletions(-) diff --git a/awx/main/middleware.py b/awx/main/middleware.py index 3bbc7975ed..90739aebbe 100644 --- a/awx/main/middleware.py +++ b/awx/main/middleware.py @@ -103,7 +103,7 @@ def _customize_graph(): class URLModificationMiddleware(MiddlewareMixin): - def __init__(self, get_response=None): + def __init__(self, get_response): models = [m for m in apps.get_app_config('main').get_models() if hasattr(m, 'get_absolute_url')] generate_graph(models) _customize_graph() diff --git a/awx/main/tests/functional/api/test_user.py b/awx/main/tests/functional/api/test_user.py index a201d4e1cc..c19192c90c 100644 --- a/awx/main/tests/functional/api/test_user.py +++ b/awx/main/tests/functional/api/test_user.py @@ -1,4 +1,5 @@ from datetime import date +from unittest import mock import pytest @@ -17,7 +18,7 @@ EXAMPLE_USER_DATA = {"username": "affable", "first_name": "a", "last_name": "a", @pytest.mark.django_db def test_user_create(post, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 assert not response.data['is_superuser'] assert not response.data['is_system_auditor'] @@ -25,22 +26,22 @@ def test_user_create(post, admin): @pytest.mark.django_db def test_fail_double_create_user(post, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert 
response.status_code == 400 @pytest.mark.django_db def test_create_delete_create_user(post, delete, admin): - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 201 - response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware()) + response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware(mock.Mock())) assert response.status_code == 204 - response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware()) + response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) print(response.data) assert response.status_code == 201 @@ -48,7 +49,7 @@ def test_create_delete_create_user(post, delete, admin): @pytest.mark.django_db def test_user_cannot_update_last_login(patch, admin): assert admin.last_login is None - patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware()) + patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'last_login': '2020-03-13T16:39:47.303016Z'}, admin, middleware=SessionMiddleware(mock.Mock())) assert User.objects.get(pk=admin.pk).last_login is None diff --git a/awx/main/tests/functional/test_named_url.py b/awx/main/tests/functional/test_named_url.py index 7df38aa4e1..884ecd7dc0 100644 --- a/awx/main/tests/functional/test_named_url.py +++ b/awx/main/tests/functional/test_named_url.py @@ -1,4 +1,6 @@ # -*- coding: utf-8 -*- +from unittest import mock + import pytest from django.core.exceptions import ImproperlyConfigured @@ -31,7 +33,7 @@ def setup_module(module): # in unit test environment. So it is wrapped by try-except block to mute any # unwanted exceptions. 
try: - URLModificationMiddleware() + URLModificationMiddleware(mock.Mock()) except ImproperlyConfigured: pass diff --git a/awx/main/tests/functional/test_session.py b/awx/main/tests/functional/test_session.py index f9eb4c42a4..157000d1ab 100644 --- a/awx/main/tests/functional/test_session.py +++ b/awx/main/tests/functional/test_session.py @@ -1,16 +1,12 @@ from importlib import import_module import pytest -import re from django.conf import settings from django.test.utils import override_settings -from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sessions.models import Session from django.contrib.auth import SESSION_KEY from unittest import mock -from awx.api.versioning import reverse - class AlwaysPassBackend(object): @@ -30,26 +26,6 @@ def test_login_json_not_allowed(get, accept, status): get('/api/login/', HTTP_ACCEPT=accept, expect=status) -@pytest.mark.skip(reason="Needs Update - CA") -@pytest.mark.django_db -def test_session_create_delete(admin, post, get): - AlwaysPassBackend.user = admin - with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'): - response = post( - '/api/login/', - data={'username': admin.username, 'password': admin.password, 'next': '/api/'}, - expect=302, - middleware=SessionMiddleware(), - format='multipart', - ) - assert 'session_id' in response.cookies - session_key = re.findall(r'session_id=[a-zA-z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :] - session = Session.objects.get(session_key=session_key) - assert int(session.get_decoded()[SESSION_KEY]) == admin.pk - response = get('/api/logout/', middleware=SessionMiddleware(), cookies={'session_id': session_key}, expect=302) - assert not Session.objects.filter(session_key=session_key).exists() - - @pytest.mark.django_db @mock.patch('awx.main.consumers.emit_channel_notification') def test_sessions_unlimited(emit, admin): @@ -81,21 +57,3 @@ def 
test_session_overlimit(emit, admin, alice): store = import_module(settings.SESSION_ENGINE).SessionStore() store.create_model_instance({SESSION_KEY: alice.pk}).save() assert Session.objects.count() == 4 - - -@pytest.mark.skip(reason="Needs Update - CA") -@pytest.mark.django_db -def test_password_update_clears_sessions(admin, alice, post, patch): - AlwaysPassBackend.user = alice - with override_settings(AUTHENTICATION_BACKENDS=(AlwaysPassBackend.get_backend_path(),), SESSION_COOKIE_NAME='session_id'): - response = post( - '/api/login/', - data={'username': alice.username, 'password': alice.password, 'next': '/api/'}, - expect=302, - middleware=SessionMiddleware(), - format='multipart', - ) - session_key = re.findall(r'session_id=[a-zA-z0-9]+', str(response.cookies['session_id']))[0][len('session_id=') :] - assert Session.objects.filter(session_key=session_key).exists() - patch(reverse('api:user_detail', kwargs={'pk': alice.pk}), admin, data={'password': 'new_password'}, expect=200) - assert not Session.objects.filter(session_key=session_key).exists() From 028f09002f414c8826045f56c110e52604bfbfae Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 10 Feb 2022 14:00:11 -0500 Subject: [PATCH 087/125] Fix the cleanup_jobs management command It previously depended on a private Django internal class that changed with Django 3.1. I've switched here instead to disabling the django-polymorphic accessors to get the underlying UnifiedJob object for a Job, which due to the way they implement those was resulting in N+1 behavior on deletes. This gets us back most of the way to the performance gains we achieved with the custom collector class. See https://github.com/django-polymorphic/django-polymorphic/issues/198. 
--- awx/main/management/commands/cleanup_jobs.py | 114 +++++------ awx/main/registrar.py | 2 +- .../functional/commands/test_cleanup_jobs.py | 178 ------------------ awx/main/utils/deletion.py | 173 ----------------- tools/scripts/firehose.py | 5 +- 5 files changed, 62 insertions(+), 410 deletions(-) delete mode 100644 awx/main/tests/functional/commands/test_cleanup_jobs.py delete mode 100644 awx/main/utils/deletion.py diff --git a/awx/main/management/commands/cleanup_jobs.py b/awx/main/management/commands/cleanup_jobs.py index c9c508c6e8..dec5ca6e50 100644 --- a/awx/main/management/commands/cleanup_jobs.py +++ b/awx/main/management/commands/cleanup_jobs.py @@ -11,13 +11,12 @@ import re # Django from django.core.management.base import BaseCommand, CommandError from django.db import transaction, connection +from django.db.models import Min, Max +from django.db.models.signals import pre_save, post_save, pre_delete, post_delete, m2m_changed from django.utils.timezone import now # AWX from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob, Notification -from awx.main.signals import disable_activity_stream, disable_computed_fields - -from awx.main.utils.deletion import AWXCollector, pre_delete def unified_job_class_to_event_table_name(job_class): @@ -80,7 +79,6 @@ class DeleteMeta: ).count() def identify_excluded_partitions(self): - part_drop = {} for pk, status, created in self.jobs_qs: @@ -94,7 +92,7 @@ class DeleteMeta: # Note that parts_no_drop _may_ contain the names of partitions that don't exist # This can happen when the cleanup of _unpartitioned_* logic leaves behind jobs with status pending, waiting, running. The find_jobs_to_delete() will # pick these jobs up. 
- self.parts_no_drop = set([k for k, v in part_drop.items() if v is False]) + self.parts_no_drop = {k for k, v in part_drop.items() if v is False} def delete_jobs(self): if not self.dry_run: @@ -116,7 +114,7 @@ class DeleteMeta: partitions_dt = [p for p in partitions_dt if not None] # convert datetime partition back to string partition - partitions_maybe_drop = set([dt_to_partition_name(tbl_name, dt) for dt in partitions_dt]) + partitions_maybe_drop = {dt_to_partition_name(tbl_name, dt) for dt in partitions_dt} # Do not drop partition if there is a job that will not be deleted pointing at it self.parts_to_drop = partitions_maybe_drop - self.parts_no_drop @@ -164,6 +162,15 @@ class Command(BaseCommand): parser.add_argument('--notifications', dest='only_notifications', action='store_true', default=False, help='Remove notifications') parser.add_argument('--workflow-jobs', default=False, action='store_true', dest='only_workflow_jobs', help='Remove workflow jobs') + def init_logging(self): + log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0])) + self.logger = logging.getLogger('awx.main.commands.cleanup_jobs') + self.logger.setLevel(log_levels.get(self.verbosity, 0)) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter('%(message)s')) + self.logger.addHandler(handler) + self.logger.propagate = False + def cleanup(self, job_class): delete_meta = DeleteMeta(self.logger, job_class, self.cutoff, self.dry_run) skipped, deleted = delete_meta.delete() @@ -193,7 +200,7 @@ class Command(BaseCommand): return (delete_meta.jobs_no_delete_count, delete_meta.jobs_to_delete_count) def _cascade_delete_job_events(self, model, pk_list): - if len(pk_list) > 0: + if pk_list: with connection.cursor() as cursor: tblname = unified_job_class_to_event_table_name(model) @@ -202,37 +209,30 @@ class Command(BaseCommand): cursor.execute(f"DELETE FROM _unpartitioned_{tblname} WHERE {rel_name} IN ({pk_list_csv})") def cleanup_jobs(self): - skipped, 
deleted = 0, 0 + batch_size = 100000 - batch_size = 1000000 + # Hack to avoid doing N+1 queries as each item in the Job query set does + # an individual query to get the underlying UnifiedJob. + Job.polymorphic_super_sub_accessors_replaced = True - while True: - # get queryset for available jobs to remove - qs = Job.objects.filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running']) - # get pk list for the first N (batch_size) objects - pk_list = qs[0:batch_size].values_list('pk', flat=True) - # You cannot delete queries with sql LIMIT set, so we must - # create a new query from this pk_list - qs_batch = Job.objects.filter(pk__in=pk_list) - just_deleted = 0 - if not self.dry_run: + skipped = (Job.objects.filter(created__gte=self.cutoff) | Job.objects.filter(status__in=['pending', 'waiting', 'running'])).count() + + qs = Job.objects.select_related('unifiedjob_ptr').filter(created__lt=self.cutoff).exclude(status__in=['pending', 'waiting', 'running']) + if self.dry_run: + deleted = qs.count() + return skipped, deleted + + deleted = 0 + info = qs.aggregate(min=Min('id'), max=Max('id')) + if info['min'] is not None: + for start in range(info['min'], info['max'] + 1, batch_size): + qs_batch = qs.filter(id__gte=start, id__lte=start + batch_size) + pk_list = qs_batch.values_list('id', flat=True) + + _, results = qs_batch.delete() + deleted += results['main.Job'] self._cascade_delete_job_events(Job, pk_list) - del_query = pre_delete(qs_batch) - collector = AWXCollector(del_query.db) - collector.collect(del_query) - _, models_deleted = collector.delete() - if models_deleted: - just_deleted = models_deleted['main.Job'] - deleted += just_deleted - else: - just_deleted = 0 # break from loop, this is dry run - deleted = qs.count() - - if just_deleted == 0: - break - - skipped += (Job.objects.filter(created__gte=self.cutoff) | Job.objects.filter(status__in=['pending', 'waiting', 'running'])).count() return skipped, deleted def 
cleanup_ad_hoc_commands(self): @@ -339,15 +339,6 @@ class Command(BaseCommand): skipped += SystemJob.objects.filter(created__gte=self.cutoff).count() return skipped, deleted - def init_logging(self): - log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0])) - self.logger = logging.getLogger('awx.main.commands.cleanup_jobs') - self.logger.setLevel(log_levels.get(self.verbosity, 0)) - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter('%(message)s')) - self.logger.addHandler(handler) - self.logger.propagate = False - def cleanup_workflow_jobs(self): skipped, deleted = 0, 0 workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff) @@ -398,6 +389,7 @@ class Command(BaseCommand): self.cutoff = now() - datetime.timedelta(days=self.days) except OverflowError: raise CommandError('--days specified is too large. Try something less than 99999 (about 270 years).') + model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs', 'workflow_jobs', 'notifications') models_to_cleanup = set() for m in model_names: @@ -405,18 +397,28 @@ class Command(BaseCommand): models_to_cleanup.add(m) if not models_to_cleanup: models_to_cleanup.update(model_names) - with disable_activity_stream(), disable_computed_fields(): - for m in model_names: - if m in models_to_cleanup: - skipped, deleted = getattr(self, 'cleanup_%s' % m)() - func = getattr(self, 'cleanup_%s_partition' % m, None) - if func: - skipped_partition, deleted_partition = func() - skipped += skipped_partition - deleted += deleted_partition + # Completely disconnect all signal handlers. This is very aggressive, + # but it will be ok since this command is run in its own process. The + # core of the logic is borrowed from Signal.disconnect(). 
+ for s in (pre_save, post_save, pre_delete, post_delete, m2m_changed): + with s.lock: + del s.receivers[:] + s.sender_receivers_cache.clear() - if self.dry_run: - self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped) - else: - self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped) + for m in model_names: + if m not in models_to_cleanup: + continue + + skipped, deleted = getattr(self, 'cleanup_%s' % m)() + + func = getattr(self, 'cleanup_%s_partition' % m, None) + if func: + skipped_partition, deleted_partition = func() + skipped += skipped_partition + deleted += deleted_partition + + if self.dry_run: + self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped) + else: + self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped) diff --git a/awx/main/registrar.py b/awx/main/registrar.py index 07e721a953..31133f936b 100644 --- a/awx/main/registrar.py +++ b/awx/main/registrar.py @@ -32,7 +32,7 @@ class ActivityStreamRegistrar(object): post_save.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_create") pre_save.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_update") pre_delete.disconnect(dispatch_uid=str(self.__class__) + str(model) + "_delete") - self.models.pop(model) + self.models.remove(model) for m2mfield in model._meta.many_to_many: m2m_attr = getattr(model, m2mfield.name) diff --git a/awx/main/tests/functional/commands/test_cleanup_jobs.py b/awx/main/tests/functional/commands/test_cleanup_jobs.py deleted file mode 100644 index 612895559a..0000000000 --- a/awx/main/tests/functional/commands/test_cleanup_jobs.py +++ /dev/null @@ -1,178 +0,0 @@ -import pytest -from datetime import datetime, timedelta -from pytz import timezone -from collections import OrderedDict -from unittest import mock - -from django.db.models.deletion import Collector, SET_NULL, CASCADE -from 
django.core.management import call_command - -from awx.main.management.commands import cleanup_jobs -from awx.main.utils.deletion import AWXCollector -from awx.main.models import JobTemplate, User, Job, Notification, WorkflowJobNode, JobHostSummary - - -@pytest.fixture -def setup_environment(inventory, project, machine_credential, host, notification_template, label): - """ - Create old jobs and new jobs, with various other objects to hit the - related fields of Jobs. This makes sure on_delete() effects are tested - properly. - """ - old_jobs = [] - new_jobs = [] - days = 10 - days_str = str(days) - - jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project) - jt.credentials.add(machine_credential) - jt_user = User.objects.create(username='jobtemplateuser') - jt.execute_role.members.add(jt_user) - - notification = Notification() - notification.notification_template = notification_template - notification.save() - - for i in range(3): - # create jobs with current time - job1 = jt.create_job() - job1.created = datetime.now(tz=timezone('UTC')) - job1.save() - # sqlite does not support partitioning so we cannot test partition-based jobevent cleanup - # JobEvent.create_from_data(job_id=job1.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save() - new_jobs.append(job1) - - # create jobs 10 days ago - job2 = jt.create_job() - job2.created = datetime.now(tz=timezone('UTC')) - timedelta(days=days) - job2.save() - job2.dependent_jobs.add(job1) - # JobEvent.create_from_data(job_id=job2.pk, uuid='abc123', event='runner_on_start', stdout='a' * 1025).save() - old_jobs.append(job2) - - jt.last_job = job2 - jt.current_job = job2 - jt.save() - host.last_job = job2 - host.save() - notification.unifiedjob_notifications.add(job2) - label.unifiedjob_labels.add(job2) - jn = WorkflowJobNode.objects.create(job=job2) - jn.save() - jh = JobHostSummary.objects.create(job=job2) - jh.save() - - return (old_jobs, new_jobs, days_str) - - -# sqlite does not 
support table partitioning so we mock out the methods responsible for pruning -# job event partitions during the job cleanup task -# https://github.com/ansible/awx/issues/9039 -@pytest.mark.django_db -@mock.patch.object(cleanup_jobs.DeleteMeta, 'identify_excluded_partitions', mock.MagicMock()) -@mock.patch.object(cleanup_jobs.DeleteMeta, 'find_partitions_to_drop', mock.MagicMock()) -@mock.patch.object(cleanup_jobs.DeleteMeta, 'drop_partitions', mock.MagicMock()) -def test_cleanup_jobs(setup_environment): - (old_jobs, new_jobs, days_str) = setup_environment - - # related_fields - related = [f for f in Job._meta.get_fields(include_hidden=True) if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many)] - - job = old_jobs[-1] # last job - - # gather related objects for job - related_should_be_removed = {} - related_should_be_null = {} - for r in related: - qs = r.related_model._base_manager.using('default').filter(**{"%s__in" % r.field.name: [job.pk]}) - if qs.exists(): - if r.field.remote_field.on_delete == CASCADE: - related_should_be_removed[qs.model] = set(qs.values_list('pk', flat=True)) - if r.field.remote_field.on_delete == SET_NULL: - related_should_be_null[(qs.model, r.field.name)] = set(qs.values_list('pk', flat=True)) - - assert related_should_be_removed - assert related_should_be_null - - call_command('cleanup_jobs', '--days', days_str) - # make sure old jobs are removed - assert not Job.objects.filter(pk__in=[obj.pk for obj in old_jobs]).exists() - - # make sure new jobs are untouched - assert len(new_jobs) == Job.objects.filter(pk__in=[obj.pk for obj in new_jobs]).count() - - # make sure related objects are destroyed or set to NULL (none) - for model, values in related_should_be_removed.items(): - assert not model.objects.filter(pk__in=values).exists() - - for (model, fieldname), values in related_should_be_null.items(): - for v in values: - assert not getattr(model.objects.get(pk=v), fieldname) - - -@pytest.mark.django_db -def 
test_awxcollector(setup_environment): - """ - Efforts to improve the performance of cleanup_jobs involved - sub-classing the django Collector class. This unit test will - check for parity between the django Collector and the modified - AWXCollector class. AWXCollector is used in cleanup_jobs to - bulk-delete old jobs from the database. - - Specifically, Collector has four dictionaries to check: - .dependencies, .data, .fast_deletes, and .field_updates - - These tests will convert each dictionary from AWXCollector - (after running .collect on jobs), from querysets to sets of - objects. The final result should be a dictionary that is - equivalent to django's Collector. - """ - - (old_jobs, new_jobs, days_str) = setup_environment - collector = Collector('default') - collector.collect(old_jobs) - - awx_col = AWXCollector('default') - # awx_col accepts a queryset as input - awx_col.collect(Job.objects.filter(pk__in=[obj.pk for obj in old_jobs])) - - # check that dependencies are the same - assert awx_col.dependencies == collector.dependencies - - # check that objects to delete are the same - awx_del_dict = OrderedDict() - for model, instances in awx_col.data.items(): - awx_del_dict.setdefault(model, set()) - for inst in instances: - # .update() will put each object in a queryset into the set - awx_del_dict[model].update(inst) - assert awx_del_dict == collector.data - - # check that field updates are the same - awx_del_dict = OrderedDict() - for model, instances_for_fieldvalues in awx_col.field_updates.items(): - awx_del_dict.setdefault(model, {}) - for (field, value), instances in instances_for_fieldvalues.items(): - awx_del_dict[model].setdefault((field, value), set()) - for inst in instances: - awx_del_dict[model][(field, value)].update(inst) - - # collector field updates don't use the base (polymorphic parent) model, e.g. - # it will use JobTemplate instead of UnifiedJobTemplate. 
Therefore, - # we need to rebuild the dictionary and grab the model from the field - collector_del_dict = OrderedDict() - for model, instances_for_fieldvalues in collector.field_updates.items(): - for (field, value), instances in instances_for_fieldvalues.items(): - collector_del_dict.setdefault(field.model, {}) - collector_del_dict[field.model][(field, value)] = collector.field_updates[model][(field, value)] - assert awx_del_dict == collector_del_dict - - # check that fast deletes are the same - collector_fast_deletes = set() - for q in collector.fast_deletes: - collector_fast_deletes.update(q) - - awx_col_fast_deletes = set() - for q in awx_col.fast_deletes: - awx_col_fast_deletes.update(q) - assert collector_fast_deletes == awx_col_fast_deletes diff --git a/awx/main/utils/deletion.py b/awx/main/utils/deletion.py deleted file mode 100644 index d17bc0b710..0000000000 --- a/awx/main/utils/deletion.py +++ /dev/null @@ -1,173 +0,0 @@ -from django.contrib.contenttypes.models import ContentType -from django.db.models.deletion import ( - DO_NOTHING, - Collector, - get_candidate_relations_to_delete, -) -from collections import Counter, OrderedDict -from django.db import transaction -from django.db.models import sql - - -def bulk_related_objects(field, objs, using): - # This overrides the method in django.contrib.contenttypes.fields.py - """ - Return all objects related to ``objs`` via this ``GenericRelation``. - """ - return field.remote_field.model._base_manager.db_manager(using).filter( - **{ - "%s__pk" - % field.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(field.model, for_concrete_model=field.for_concrete_model).pk, - "%s__in" % field.object_id_field_name: list(objs.values_list('pk', flat=True)), - } - ) - - -def pre_delete(qs): - # taken from .delete method in django.db.models.query.py - assert qs.query.can_filter(), "Cannot use 'limit' or 'offset' with delete." 
- - if qs._fields is not None: - raise TypeError("Cannot call delete() after .values() or .values_list()") - - del_query = qs._chain() - - # The delete is actually 2 queries - one to find related objects, - # and one to delete. Make sure that the discovery of related - # objects is performed on the same database as the deletion. - del_query._for_write = True - - # Disable non-supported fields. - del_query.query.select_for_update = False - del_query.query.select_related = False - del_query.query.clear_ordering(force_empty=True) - return del_query - - -class AWXCollector(Collector): - def add(self, objs, source=None, nullable=False, reverse_dependency=False): - """ - Add 'objs' to the collection of objects to be deleted. If the call is - the result of a cascade, 'source' should be the model that caused it, - and 'nullable' should be set to True if the relation can be null. - - Return a list of all objects that were not already collected. - """ - if not objs.exists(): - return objs - model = objs.model - self.data.setdefault(model, []) - self.data[model].append(objs) - # Nullable relationships can be ignored -- they are nulled out before - # deleting, and therefore do not affect the order in which objects have - # to be deleted. - if source is not None and not nullable: - if reverse_dependency: - source, model = model, source - self.dependencies.setdefault(source._meta.concrete_model, set()).add(model._meta.concrete_model) - return objs - - def add_field_update(self, field, value, objs): - """ - Schedule a field update. 'objs' must be a homogeneous iterable - collection of model instances (e.g. a QuerySet). 
- """ - if not objs.exists(): - return - model = objs.model - self.field_updates.setdefault(model, {}) - self.field_updates[model].setdefault((field, value), []) - self.field_updates[model][(field, value)].append(objs) - - def collect(self, objs, source=None, nullable=False, collect_related=True, source_attr=None, reverse_dependency=False, keep_parents=False): - """ - Add 'objs' to the collection of objects to be deleted as well as all - parent instances. 'objs' must be a homogeneous iterable collection of - model instances (e.g. a QuerySet). If 'collect_related' is True, - related objects will be handled by their respective on_delete handler. - - If the call is the result of a cascade, 'source' should be the model - that caused it and 'nullable' should be set to True, if the relation - can be null. - - If 'reverse_dependency' is True, 'source' will be deleted before the - current model, rather than after. (Needed for cascading to parent - models, the one case in which the cascade follows the forwards - direction of an FK rather than the reverse direction.) - - If 'keep_parents' is True, data of parent model's will be not deleted. - """ - - if hasattr(objs, 'polymorphic_disabled'): - objs.polymorphic_disabled = True - - if self.can_fast_delete(objs): - self.fast_deletes.append(objs) - return - new_objs = self.add(objs, source, nullable, reverse_dependency=reverse_dependency) - if not new_objs.exists(): - return - - model = new_objs.model - - if not keep_parents: - # Recursively collect concrete model's parent models, but not their - # related objects. 
These will be found by meta.get_fields() - concrete_model = model._meta.concrete_model - for ptr in concrete_model._meta.parents.keys(): - if ptr: - parent_objs = ptr.objects.filter(pk__in=new_objs.values_list('pk', flat=True)) - self.collect(parent_objs, source=model, collect_related=False, reverse_dependency=True) - if collect_related: - parents = model._meta.parents - for related in get_candidate_relations_to_delete(model._meta): - # Preserve parent reverse relationships if keep_parents=True. - if keep_parents and related.model in parents: - continue - field = related.field - if field.remote_field.on_delete == DO_NOTHING: - continue - related_qs = self.related_objects(related, new_objs) - if self.can_fast_delete(related_qs, from_field=field): - self.fast_deletes.append(related_qs) - elif related_qs: - field.remote_field.on_delete(self, field, related_qs, self.using) - for field in model._meta.private_fields: - if hasattr(field, 'bulk_related_objects'): - # It's something like generic foreign key. 
- sub_objs = bulk_related_objects(field, new_objs, self.using) - self.collect(sub_objs, source=model, nullable=True) - - def delete(self): - self.sort() - - # collect pk_list before deletion (once things start to delete - # queries might not be able to retreive pk list) - del_dict = OrderedDict() - for model, instances in self.data.items(): - del_dict.setdefault(model, []) - for inst in instances: - del_dict[model] += list(inst.values_list('pk', flat=True)) - - deleted_counter = Counter() - - with transaction.atomic(using=self.using, savepoint=False): - - # update fields - for model, instances_for_fieldvalues in self.field_updates.items(): - for (field, value), instances in instances_for_fieldvalues.items(): - for inst in instances: - query = sql.UpdateQuery(model) - query.update_batch(inst.values_list('pk', flat=True), {field.name: value}, self.using) - # fast deletes - for qs in self.fast_deletes: - count = qs._raw_delete(using=self.using) - deleted_counter[qs.model._meta.label] += count - - # delete instances - for model, pk_list in del_dict.items(): - query = sql.DeleteQuery(model) - count = query.delete_batch(pk_list, self.using) - deleted_counter[model._meta.label] += count - - return sum(deleted_counter.values()), dict(deleted_counter) diff --git a/tools/scripts/firehose.py b/tools/scripts/firehose.py index cd5930315a..2eeeb5da7b 100755 --- a/tools/scripts/firehose.py +++ b/tools/scripts/firehose.py @@ -318,8 +318,9 @@ if __name__ == '__main__': for j_hour in range(24): time_delta = datetime.timedelta(days=i_day, hours=j_hour, seconds=0) created_job_ids = generate_jobs(jobs, batch_size=batch_size, time_delta=time_delta) - for k_id in created_job_ids: - generate_events(events, str(k_id), time_delta) + if events > 0: + for k_id in created_job_ids: + generate_events(events, str(k_id), time_delta) print(datetime.datetime.utcnow().isoformat()) conn.close() From 676b8f6d8ff85c10e66cebe0a471d3d97434a6c4 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Mon, 14 
Feb 2022 15:13:10 -0500 Subject: [PATCH 088/125] Implement an out-of-band migration to change the json fields --- awx/main/models/__init__.py | 88 +++++++++++++++++++++++++ awx/main/tasks/system.py | 125 +++++++++++++++++++++++++++++++++++- 2 files changed, 212 insertions(+), 1 deletion(-) diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index ed49b98083..107c7a9418 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -3,6 +3,7 @@ # Django from django.conf import settings # noqa +from django.db import connection from django.db.models.signals import pre_delete # noqa # AWX @@ -97,6 +98,93 @@ User.add_to_class('can_access_with_errors', check_user_access_with_errors) User.add_to_class('accessible_objects', user_accessible_objects) +def convert_jsonfields_to_jsonb(): + if connection.vendor != 'postgresql': + return + + # fmt: off + fields = [ # Table name, expensive or not, tuple of column names + ('conf_setting', False, ( + 'value', + )), + ('main_instancegroup', False, ( + 'policy_instance_list', + )), + ('main_jobtemplate', False, ( + 'survey_spec', + )), + ('main_notificationtemplate', False, ( + 'notification_configuration', + 'messages', + )), + ('main_project', False, ( + 'playbook_files', + 'inventory_files', + )), + ('main_schedule', False, ( + 'extra_data', + 'char_prompts', + 'survey_passwords', + )), + ('main_workflowjobtemplate', False, ( + 'survey_spec', + 'char_prompts', + )), + ('main_workflowjobtemplatenode', False, ( + 'char_prompts', + 'extra_data', + 'survey_passwords', + )), + ('main_activitystream', True, ( + 'setting', # NN = NOT NULL + 'deleted_actor', + )), + ('main_job', True, ( + 'survey_passwords', # NN + 'artifacts', # NN + )), + ('main_joblaunchconfig', True, ( + 'extra_data', # NN + 'survey_passwords', # NN + 'char_prompts', # NN + )), + ('main_notification', True, ( + 'body', # NN + )), + ('main_unifiedjob', True, ( + 'job_env', # NN + )), + ('main_workflowjob', True, ( + 'survey_passwords', 
# NN + 'char_prompts', # NN + )), + ('main_workflowjobnode', True, ( + 'char_prompts', # NN + 'ancestor_artifacts', # NN + 'extra_data', # NN + 'survey_passwords', # NN + )), + ] + # fmt: on + + with connection.cursor() as cursor: + for table, expensive, columns in fields: + cursor.execute( + """ + select count(1) from information_schema.columns + where + table_name = %s and + column_name in %s and + data_type != 'jsonb'; + """, + (table, columns), + ) + if cursor.fetchone()[0]: + from awx.main.tasks.system import migrate_json_fields + + migrate_json_fields.apply_async([table, expensive, columns]) + + def cleanup_created_modified_by(sender, **kwargs): # work around a bug in django-polymorphic that doesn't properly # handle cascades for reverse foreign keys on the polymorphic base model diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index 4e1b9eceed..fd47d63a3b 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -1,5 +1,6 @@ # Python from collections import namedtuple +import itertools import functools import importlib import json @@ -13,7 +14,7 @@ from distutils.version import LooseVersion as Version # Django from django.conf import settings -from django.db import transaction, DatabaseError, IntegrityError +from django.db import connection, transaction, DatabaseError, IntegrityError from django.db.models.fields.related import ForeignKey from django.utils.timezone import now from django.utils.encoding import smart_str @@ -22,6 +23,7 @@ from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_noop from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist +from django.contrib.contenttypes.models import ContentType # Django-CRUM from crum import impersonate @@ -46,6 +48,7 @@ from awx.main.models import ( Inventory, SmartInventoryMembership, Job, + convert_jsonfields_to_jsonb, ) from awx.main.constants import ACTIVE_STATES from awx.main.dispatch.publish 
import task @@ -78,6 +81,9 @@ Try upgrading OpenSSH or providing your private key in an different format. \ def dispatch_startup(): startup_logger = logging.getLogger('awx.main.tasks') + + convert_jsonfields_to_jsonb() + startup_logger.debug("Syncing Schedules") for sch in Schedule.objects.all(): try: @@ -121,6 +127,123 @@ def inform_cluster_of_shutdown(): logger.exception('Encountered problem with normal shutdown signal.') +def migrate_json_fields_expensive(table, columns): + batchsize = 50000 + + ct = ContentType.objects.get_by_natural_key(*table.split('_', 1)) + model = ct.model_class() + + # Phase 1: add the new columns, making them nullable to avoid populating them + with connection.schema_editor() as schema_editor: + # See: https://docs.djangoproject.com/en/3.1/ref/schema-editor/ + + for colname in columns: + f = model._meta.get_field(colname) + _, _, args, kwargs = f.deconstruct() + kwargs['null'] = True + new_f = f.__class__(*args, **kwargs) + new_f.set_attributes_from_name(f'_{colname}') + + schema_editor.add_field(model, new_f) + + # Create a trigger to make sure new data automatically gets put in both fields. 
+ with connection.cursor() as cursor: + # It's a little annoying, I think this trigger will re-do + # the same work as the update query in Phase 2 + cursor.execute( + f""" + create or replace function update_{table}_{colname}() + returns trigger as $body$ + begin + new._{colname} = new.{colname}::jsonb + return new; + end + $body$ language plpgsql; + """ + ) + cursor.execute( + f""" + create trigger {table}_{colname}_trigger + before insert or update + on {table} + for each row + execute procedure update_{table}_{colname}; + """ + ) + + # Phase 2: copy over the data + with connection.cursor() as cursor: + rows = 0 + for i in itertools.count(0, batchsize): + cursor.execute(f"select count(1) from {table} where id >= %s;", (i,)) + if not cursor.fetchone()[0]: + break + + column_expr = ', '.join(f"_{colname} = {colname}::jsonb" for colname in columns) + cursor.execute( + f""" + update {table} + set {column_expr} + where id >= %s and id < %s; + """, + (i, i + batchsize), + ) + rows += cursor.rowcount + logger.debug(f"Batch {i} to {i + batchsize} copied on {table}.") + + logger.warning(f"Data copied for {rows} rows on {table}.") + + # Phase 3: drop the old column and rename the new one + with connection.schema_editor() as schema_editor: + + # FIXME: Grab a lock explicitly here? 
+ for colname in columns: + with connection.cursor() as cursor: + cursor.execute(f"drop trigger {table}_{colname}_trigger;") + cursor.execute(f"drop function update_{table}_{colname};") + + f = model._meta.get_field(colname) + _, _, args, kwargs = f.deconstruct() + kwargs['null'] = True + new_f = f.__class__(*args, **kwargs) + new_f.set_attributes_from_name(f'_{colname}') + + schema_editor.remove_field(model, f) + + _, _, args, kwargs = new_f.deconstruct() + f = new_f.__class__(*args, **kwargs) + f.set_attributes_from_name(colname) + + schema_editor.alter_field(model, new_f, f) + + +@task(queue=get_local_queuename) +def migrate_json_fields(table, expensive, columns): + logger.warning(f"Migrating json fields: {table} {columns}") + + with advisory_lock(f'json_migration_{table}', wait=False) as acquired: + if not acquired: + return + + from django.db.migrations.executor import MigrationExecutor + + # If Django is currently running migrations, wait until it is done. + while True: + executor = MigrationExecutor(connection) + if not executor.migration_plan(executor.loader.graph.leaf_nodes()): + break + time.sleep(60) + + if expensive: + migrate_json_fields_expensive(table, columns) + else: + with connection.cursor() as cursor: + column_expr = " ".join(f"ALTER {colname} TYPE jsonb" for colname in columns) + cursor.execute(f"ALTER TABLE {table} {column_expr};") + + logger.warning(f"Migration of {table} to jsonb is finished") + + @task(queue=get_local_queuename) def apply_cluster_membership_policies(): from awx.main.signals import disable_activity_stream From b2fe1c46ee050985cfe40d3670fff76b27d02a45 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Tue, 8 Mar 2022 08:18:05 -0500 Subject: [PATCH 089/125] Fix playbook error when files do not exist. 
I was seeing "Failed to template loop_control.label: 'dict object' has no attribute 'path'" --- tools/docker-compose/ansible/roles/sources/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/docker-compose/ansible/roles/sources/tasks/main.yml b/tools/docker-compose/ansible/roles/sources/tasks/main.yml index a8809eb4e5..c7771d6b74 100644 --- a/tools/docker-compose/ansible/roles/sources/tasks/main.yml +++ b/tools/docker-compose/ansible/roles/sources/tasks/main.yml @@ -27,7 +27,7 @@ when: not lookup('vars', item.item, default='') and not item.stat.exists loop: "{{ secrets.results }}" loop_control: - label: '{{ item.stat.path }}' + label: '{{ item.item }}' - name: Include generated secrets unless they are explicitly passed in include_vars: "{{ sources_dest }}/secrets/{{ item.item }}.yml" From f04d7733bba22ad14b1f96a8de0df23154700a98 Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Tue, 8 Mar 2022 08:21:31 -0500 Subject: [PATCH 090/125] Add a CI check for the development environment --- .github/workflows/ci.yml | 32 ++++++++- tools/docker-compose/ansible/smoke-test.yml | 77 +++++++++++++++++++++ 2 files changed, 108 insertions(+), 1 deletion(-) create mode 100644 tools/docker-compose/ansible/smoke-test.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c9465e9c32..01e33079c8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,7 +5,7 @@ env: on: pull_request: jobs: - common_tests: + common-tests: name: ${{ matrix.tests.name }} runs-on: ubuntu-latest permissions: @@ -66,6 +66,36 @@ jobs: run: | docker run -u $(id -u) --rm -v ${{ github.workspace}}:/awx_devel/:Z \ --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }} + dev-env: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Get python version from Makefile + run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV + + - name: Install python ${{ 
env.py_version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env.py_version }} + + - name: Log in to registry + run: | + echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin + + - name: Pre-pull image to warm build cache + run: | + docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || : + + - name: Build image + run: | + DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build + + - name: Run smoke test + run: | + export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} + export COMPOSE_TAG=${{ env.BRANCH }} + ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v awx-operator: runs-on: ubuntu-latest diff --git a/tools/docker-compose/ansible/smoke-test.yml b/tools/docker-compose/ansible/smoke-test.yml new file mode 100644 index 0000000000..79b0fc2ee2 --- /dev/null +++ b/tools/docker-compose/ansible/smoke-test.yml @@ -0,0 +1,77 @@ +--- +# +# This is used by a CI check in GitHub Actions and isnt really +# meant to be run locally. +# +# The development environment does some unfortunate things to +# make rootless podman work inside of a docker container. +# The goal here is to essentially tests that the awx user is +# able to run `podman run`. 
+# +- name: Test that the development environment is able to launch a job + hosts: localhost + tasks: + - name: Boot the development environment + command: | + make docker-compose + environment: + COMPOSE_UP_OPTS: -d + args: + chdir: "{{ repo_dir }}" + + # Takes a while for migrations to finish + - name: Wait for the dev environment to be ready + uri: + url: "http://localhost:8013/api/v2/ping/" + register: _result + until: _result.status == 200 + retries: 120 + delay: 5 + + - name: Reset admin password + shell: | + docker exec -i tools_awx_1 bash < Date: Tue, 8 Mar 2022 09:07:09 -0500 Subject: [PATCH 091/125] Do not remove artifacts for local work --- awx/main/tasks/receptor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/main/tasks/receptor.py b/awx/main/tasks/receptor.py index 5d58217b6f..b72d1547a7 100644 --- a/awx/main/tasks/receptor.py +++ b/awx/main/tasks/receptor.py @@ -326,7 +326,7 @@ class AWXReceptorJob: # Artifacts are an output, but sometimes they are an input as well # this is the case with fact cache, where clearing facts deletes a file, and this must be captured artifact_dir = os.path.join(self.runner_params['private_data_dir'], 'artifacts') - if os.path.exists(artifact_dir): + if self.work_type != 'local' and os.path.exists(artifact_dir): shutil.rmtree(artifact_dir) resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, return_socket=True, return_sockfile=True) From bd5c304a50402766ac6b11662d24a632bb220f2d Mon Sep 17 00:00:00 2001 From: Shane McDonald Date: Tue, 8 Mar 2022 12:05:50 -0500 Subject: [PATCH 092/125] Make our collection work with prefixed API endpoints I caught this when trying to add test coverage for https://github.com/ansible/awx/pull/11342 --- .../plugins/module_utils/controller_api.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/awx_collection/plugins/module_utils/controller_api.py b/awx_collection/plugins/module_utils/controller_api.py index 
21562602a5..a48a72224e 100644 --- a/awx_collection/plugins/module_utils/controller_api.py +++ b/awx_collection/plugins/module_utils/controller_api.py @@ -133,6 +133,8 @@ class ControllerModule(AnsibleModule): # Try to parse the hostname as a url try: self.url = urlparse(self.host) + # Store URL prefix for later use in build_url + self.url_prefix = self.url.path except Exception as e: self.fail_json(msg="Unable to parse controller_host as a URL ({1}): {0}".format(self.host, e)) @@ -147,8 +149,9 @@ class ControllerModule(AnsibleModule): # Make sure we start with /api/vX if not endpoint.startswith("/"): endpoint = "/{0}".format(endpoint) - if not endpoint.startswith("/api/"): - endpoint = "/api/v2{0}".format(endpoint) + prefix = self.url_prefix.rstrip("/") + if not endpoint.startswith(prefix + "/api/"): + endpoint = prefix + "/api/v2{0}".format(endpoint) if not endpoint.endswith('/') and '?' not in endpoint: endpoint = "{0}/".format(endpoint) @@ -589,8 +592,10 @@ class ControllerAPIModule(ControllerModule): "application": None, "scope": "write", } + # Preserve URL prefix + endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/' # Post to the tokens endpoint with baisc auth to try and get a token - api_token_url = (self.url._replace(path='/api/v2/tokens/')).geturl() + api_token_url = (self.url._replace(path=endpoint)).geturl() try: response = self.session.open( @@ -954,9 +959,10 @@ class ControllerAPIModule(ControllerModule): if self.authenticated and self.oauth_token_id: # Attempt to delete our current token from /api/v2/tokens/ # Post to the tokens endpoint with baisc auth to try and get a token + endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/{0}/'.format(self.oauth_token_id) api_token_url = ( self.url._replace( - path='/api/v2/tokens/{0}/'.format(self.oauth_token_id), query=None # in error cases, fail_json exists before exception handling + path=endpoint, query=None # in error cases, fail_json exists before exception handling ) ).geturl() From 
4c9d028a35dab930c8a9c59da70b8e4c8448a666 Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Tue, 8 Mar 2022 13:04:35 -0500 Subject: [PATCH 093/125] Disable checkbox while job is running in project and inventory source lists (#11841) --- .../Inventory/InventorySources/InventorySourceListItem.js | 2 ++ awx/ui/src/screens/Project/ProjectList/ProjectListItem.js | 1 + 2 files changed, 3 insertions(+) diff --git a/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js b/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js index d958f82a9e..1c435095b3 100644 --- a/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js +++ b/awx/ui/src/screens/Inventory/InventorySources/InventorySourceListItem.js @@ -13,6 +13,7 @@ import { ActionsTd, ActionItem, TdBreakWord } from 'components/PaginatedTable'; import StatusLabel from 'components/StatusLabel'; import JobCancelButton from 'components/JobCancelButton'; import { formatDateString } from 'util/dates'; +import { isJobRunning } from 'util/jobs'; import InventorySourceSyncButton from '../shared/InventorySourceSyncButton'; const ExclamationTriangleIcon = styled(PFExclamationTriangleIcon)` @@ -64,6 +65,7 @@ function InventorySourceListItem({ rowIndex, isSelected, onSelect, + disable: isJobRunning(source.status), }} /> diff --git a/awx/ui/src/screens/Project/ProjectList/ProjectListItem.js b/awx/ui/src/screens/Project/ProjectList/ProjectListItem.js index fb213f15a1..80b547f955 100644 --- a/awx/ui/src/screens/Project/ProjectList/ProjectListItem.js +++ b/awx/ui/src/screens/Project/ProjectList/ProjectListItem.js @@ -172,6 +172,7 @@ function ProjectListItem({ rowIndex, isSelected, onSelect, + disable: isJobRunning(job?.status), }} dataLabel={t`Selected`} /> From 079eed2b9eb8f0150e046a6c6d292f7611b1143e Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 8 Mar 2022 10:13:47 -0800 Subject: [PATCH 094/125] Mock web worker. 
--- awx/ui/src/App.test.js | 1 + awx/ui/src/index.test.js | 1 + awx/ui/src/routeConfig.test.js | 1 + awx/ui/src/screens/TopologyView/MeshGraph.js | 6 ++---- awx/ui/src/screens/TopologyView/utils/webWorker.js | 3 +++ 5 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 awx/ui/src/screens/TopologyView/utils/webWorker.js diff --git a/awx/ui/src/App.test.js b/awx/ui/src/App.test.js index de080062fd..bdc71de49f 100644 --- a/awx/ui/src/App.test.js +++ b/awx/ui/src/App.test.js @@ -7,6 +7,7 @@ import { mountWithContexts } from '../testUtils/enzymeHelpers'; import App, { ProtectedRoute } from './App'; jest.mock('./api'); +jest.mock('screens/TopologyView/utils/WebWorker', () => jest.fn()); describe('', () => { beforeEach(() => { diff --git a/awx/ui/src/index.test.js b/awx/ui/src/index.test.js index 49ae9e2317..ffde7d7d9c 100644 --- a/awx/ui/src/index.test.js +++ b/awx/ui/src/index.test.js @@ -3,6 +3,7 @@ import ReactDOM from 'react-dom'; import App from './App'; jest.mock('react-dom', () => ({ render: jest.fn() })); +jest.mock('screens/TopologyView/utils/WebWorker', () => jest.fn()); describe('index.jsx', () => { it('renders ok', () => { diff --git a/awx/ui/src/routeConfig.test.js b/awx/ui/src/routeConfig.test.js index 35e0a5eae3..643bd13dfd 100644 --- a/awx/ui/src/routeConfig.test.js +++ b/awx/ui/src/routeConfig.test.js @@ -1,4 +1,5 @@ import getRouteConfig from './routeConfig'; +jest.mock('screens/TopologyView/utils/WebWorker', () => jest.fn()); const userProfile = { isSuperUser: false, diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index a88f5d6dc4..d643044e68 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -17,6 +17,7 @@ import { // generateRandomNodes, // getRandomInt, } from './utils/helpers'; +import webWorker from './utils/webWorker'; import { DEFAULT_RADIUS, DEFAULT_NODE_COLOR, @@ -59,10 +60,7 @@ function MeshGraph({ data, 
showLegend, zoom, setShowZoomControls }) { const graph = data; /* WEB WORKER */ - const worker = new Worker( - new URL('./utils/workers/simulationWorker.js', import.meta.url) - ); - + const worker = webWorker(); worker.postMessage({ nodes: graph.nodes, links: graph.links, diff --git a/awx/ui/src/screens/TopologyView/utils/webWorker.js b/awx/ui/src/screens/TopologyView/utils/webWorker.js new file mode 100644 index 0000000000..7cc564b1c5 --- /dev/null +++ b/awx/ui/src/screens/TopologyView/utils/webWorker.js @@ -0,0 +1,3 @@ +export default function webWorker() { + return new Worker(new URL('./workers/simulationWorker.js', import.meta.url)); +} From 7a6fd2623e6e674bed485858642514f431648561 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 8 Mar 2022 12:34:05 -0800 Subject: [PATCH 095/125] Move web worker out of /screens directory. --- awx/ui/src/App.test.js | 2 +- awx/ui/src/index.test.js | 2 +- awx/ui/src/routeConfig.test.js | 2 +- awx/ui/src/screens/TopologyView/MeshGraph.js | 2 +- awx/ui/src/screens/TopologyView/utils/webWorker.js | 3 --- awx/ui/src/util/webWorker.js | 8 ++++++++ 6 files changed, 12 insertions(+), 7 deletions(-) delete mode 100644 awx/ui/src/screens/TopologyView/utils/webWorker.js create mode 100644 awx/ui/src/util/webWorker.js diff --git a/awx/ui/src/App.test.js b/awx/ui/src/App.test.js index bdc71de49f..e1f2fb3bc3 100644 --- a/awx/ui/src/App.test.js +++ b/awx/ui/src/App.test.js @@ -7,7 +7,7 @@ import { mountWithContexts } from '../testUtils/enzymeHelpers'; import App, { ProtectedRoute } from './App'; jest.mock('./api'); -jest.mock('screens/TopologyView/utils/WebWorker', () => jest.fn()); +jest.mock('util/webWorker', () => jest.fn()); describe('', () => { beforeEach(() => { diff --git a/awx/ui/src/index.test.js b/awx/ui/src/index.test.js index ffde7d7d9c..a0419c9933 100644 --- a/awx/ui/src/index.test.js +++ b/awx/ui/src/index.test.js @@ -3,7 +3,7 @@ import ReactDOM from 'react-dom'; import App from './App'; jest.mock('react-dom', () => ({ render: 
jest.fn() })); -jest.mock('screens/TopologyView/utils/WebWorker', () => jest.fn()); +jest.mock('util/webWorker', () => jest.fn()); describe('index.jsx', () => { it('renders ok', () => { diff --git a/awx/ui/src/routeConfig.test.js b/awx/ui/src/routeConfig.test.js index 643bd13dfd..5a7def4348 100644 --- a/awx/ui/src/routeConfig.test.js +++ b/awx/ui/src/routeConfig.test.js @@ -1,5 +1,5 @@ import getRouteConfig from './routeConfig'; -jest.mock('screens/TopologyView/utils/WebWorker', () => jest.fn()); +jest.mock('util/webWorker', () => jest.fn()); const userProfile = { isSuperUser: false, diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index d643044e68..2d7700b45e 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -17,7 +17,7 @@ import { // generateRandomNodes, // getRandomInt, } from './utils/helpers'; -import webWorker from './utils/webWorker'; +import webWorker from '../../util/webWorker'; import { DEFAULT_RADIUS, DEFAULT_NODE_COLOR, diff --git a/awx/ui/src/screens/TopologyView/utils/webWorker.js b/awx/ui/src/screens/TopologyView/utils/webWorker.js deleted file mode 100644 index 7cc564b1c5..0000000000 --- a/awx/ui/src/screens/TopologyView/utils/webWorker.js +++ /dev/null @@ -1,3 +0,0 @@ -export default function webWorker() { - return new Worker(new URL('./workers/simulationWorker.js', import.meta.url)); -} diff --git a/awx/ui/src/util/webWorker.js b/awx/ui/src/util/webWorker.js new file mode 100644 index 0000000000..64c2eac037 --- /dev/null +++ b/awx/ui/src/util/webWorker.js @@ -0,0 +1,8 @@ +export default function webWorker() { + return new Worker( + new URL( + 'screens/TopologyView/utils/workers/simulationWorker.js', + import.meta.url + ) + ); +} From 4080007ced23c2734cd44d408be2c3d32a280f36 Mon Sep 17 00:00:00 2001 From: "Keith J. 
Grant" Date: Mon, 7 Mar 2022 10:04:11 -0800 Subject: [PATCH 096/125] JobOutput: add 'waiting for job' and 'no results found' messages --- .../components/ContentEmpty/ContentEmpty.js | 12 ++- .../src/screens/Job/JobOutput/EmptyOutput.js | 36 +++++++++ awx/ui/src/screens/Job/JobOutput/JobEvent.js | 8 +- awx/ui/src/screens/Job/JobOutput/JobOutput.js | 81 ++++++++++++------- 4 files changed, 101 insertions(+), 36 deletions(-) create mode 100644 awx/ui/src/screens/Job/JobOutput/EmptyOutput.js diff --git a/awx/ui/src/components/ContentEmpty/ContentEmpty.js b/awx/ui/src/components/ContentEmpty/ContentEmpty.js index a1d88be425..d74ee46dc6 100644 --- a/awx/ui/src/components/ContentEmpty/ContentEmpty.js +++ b/awx/ui/src/components/ContentEmpty/ContentEmpty.js @@ -1,6 +1,5 @@ import React from 'react'; import { t } from '@lingui/macro'; - import { Title, EmptyState, @@ -9,9 +8,14 @@ import { } from '@patternfly/react-core'; import { CubesIcon } from '@patternfly/react-icons'; -const ContentEmpty = ({ title = '', message = '' }) => ( - - +const ContentEmpty = ({ + title = '', + message = '', + icon = CubesIcon, + className = '', +}) => ( + + {title || t`No items found.`} diff --git a/awx/ui/src/screens/Job/JobOutput/EmptyOutput.js b/awx/ui/src/screens/Job/JobOutput/EmptyOutput.js new file mode 100644 index 0000000000..69e21eba5c --- /dev/null +++ b/awx/ui/src/screens/Job/JobOutput/EmptyOutput.js @@ -0,0 +1,36 @@ +import React, { useEffect } from 'react'; +import 'styled-components/macro'; +import { t } from '@lingui/macro'; +import { SearchIcon } from '@patternfly/react-icons'; +import ContentEmpty from 'components/ContentEmpty'; + +export default function EmptyOutput({ + hasQueryParams, + isJobRunning, + onUnmount, +}) { + let title; + let message; + let icon; + + useEffect(() => onUnmount); + + if (hasQueryParams) { + title = t`The search filter did not produce any results…`; + message = t`Please try another search using the filter above`; + icon = SearchIcon; + } else if 
(isJobRunning) { + title = t`Waiting for job output…`; + } else { + title = t`No output found for this job.`; + } + + return ( + + ); +} diff --git a/awx/ui/src/screens/Job/JobOutput/JobEvent.js b/awx/ui/src/screens/Job/JobOutput/JobEvent.js index 3516f24749..8f3dd81f8e 100644 --- a/awx/ui/src/screens/Job/JobOutput/JobEvent.js +++ b/awx/ui/src/screens/Job/JobOutput/JobEvent.js @@ -17,11 +17,15 @@ function JobEvent({ isCollapsed, onToggleCollapsed, hasChildren, + jobStatus, }) { const numOutputLines = lineTextHtml?.length || 0; useEffect(() => { - measure(); - }, [numOutputLines, isCollapsed, measure]); + const timeout = setTimeout(measure, 0); + return () => { + clearTimeout(timeout); + }; + }, [numOutputLines, isCollapsed, measure, jobStatus]); let toggleLineIndex = -1; if (hasChildren) { diff --git a/awx/ui/src/screens/Job/JobOutput/JobOutput.js b/awx/ui/src/screens/Job/JobOutput/JobOutput.js index 1173da6c80..c5eb8ca217 100644 --- a/awx/ui/src/screens/Job/JobOutput/JobOutput.js +++ b/awx/ui/src/screens/Job/JobOutput/JobOutput.js @@ -30,6 +30,7 @@ import JobEventSkeleton from './JobEventSkeleton'; import PageControls from './PageControls'; import HostEventModal from './HostEventModal'; import JobOutputSearch from './JobOutputSearch'; +import EmptyOutput from './EmptyOutput'; import { HostStatusBar, OutputToolbar } from './shared'; import getLineTextHtml from './getLineTextHtml'; import connectJobSocket, { closeWebSocket } from './connectJobSocket'; @@ -220,6 +221,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { ...Object.values(siblingRequests.current || {}), ...Object.values(numEventsRequests.current || {}), ]; + setHasContentLoading(true); // prevents "no content found" screen from flashing Promise.all(pendingRequests).then(() => { setRemoteRowCount(0); clearLoadedEvents(); @@ -509,6 +511,7 @@ function JobOutput({ job, eventRelatedSearchableKeys, eventSearchableKeys }) { onToggleCollapsed={() => { 
toggleNodeIsCollapsed(event.uuid, !node.isCollapsed); }} + jobStatus={jobStatus} /> ) : ( - {({ onRowsRendered, registerChild }) => ( - - {({ width, height }) => ( - <> - {hasContentLoading ? ( -
- -
- ) : ( - { - registerChild(ref); - listRef.current = ref; - }} - deferredMeasurementCache={cache} - height={height || 1} - onRowsRendered={onRowsRendered} - rowCount={totalNonCollapsedRows + wsEvents.length} - rowHeight={cache.rowHeight} - rowRenderer={rowRenderer} - scrollToAlignment="start" - width={width || 1} - overscanRowCount={20} - onScroll={handleScroll} - /> - )} - - )} -
- )} + {({ onRowsRendered, registerChild }) => { + if ( + !hasContentLoading && + remoteRowCount + wsEvents.length === 0 + ) { + return ( + 1} + isJobRunning={isJobRunning(jobStatus)} + onUnmount={() => { + if (listRef.current?.recomputeRowHeights) { + listRef.current.recomputeRowHeights(); + } + }} + /> + ); + } + return ( + + {({ width, height }) => ( + <> + {hasContentLoading ? ( +
+ +
+ ) : ( + { + registerChild(ref); + listRef.current = ref; + }} + deferredMeasurementCache={cache} + height={height || 1} + onRowsRendered={onRowsRendered} + rowCount={totalNonCollapsedRows + wsEvents.length} + rowHeight={cache.rowHeight} + rowRenderer={rowRenderer} + scrollToAlignment="start" + width={width || 1} + overscanRowCount={20} + onScroll={handleScroll} + /> + )} + + )} +
+ ); + }} From 8bf9dd038eb1cb647bb059d1c3d5b78601f41937 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Tue, 8 Mar 2022 12:58:04 -0800 Subject: [PATCH 097/125] Address review feedback. --- awx/ui/src/screens/TopologyView/ContentLoading.js | 1 - awx/ui/src/screens/TopologyView/MeshGraph.js | 4 ---- awx/ui/src/screens/TopologyView/utils/helpers.js | 2 +- 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/awx/ui/src/screens/TopologyView/ContentLoading.js b/awx/ui/src/screens/TopologyView/ContentLoading.js index cb67f6d34b..c4b07bf9f0 100644 --- a/awx/ui/src/screens/TopologyView/ContentLoading.js +++ b/awx/ui/src/screens/TopologyView/ContentLoading.js @@ -43,5 +43,4 @@ const ContentLoading = ({ className, progress }) => (
); -export { ContentLoading as _ContentLoading }; export default ContentLoading; diff --git a/awx/ui/src/screens/TopologyView/MeshGraph.js b/awx/ui/src/screens/TopologyView/MeshGraph.js index 2d7700b45e..01ed117f7b 100644 --- a/awx/ui/src/screens/TopologyView/MeshGraph.js +++ b/awx/ui/src/screens/TopologyView/MeshGraph.js @@ -14,8 +14,6 @@ import { redirectToDetailsPage, getHeight, getWidth, - // generateRandomNodes, - // getRandomInt, } from './utils/helpers'; import webWorker from '../../util/webWorker'; import { @@ -34,14 +32,12 @@ const Loader = styled(ContentLoading)` background: white; `; function MeshGraph({ data, showLegend, zoom, setShowZoomControls }) { - // function MeshGraph({ showLegend, zoom }) { const [isNodeSelected, setIsNodeSelected] = useState(false); const [selectedNode, setSelectedNode] = useState(null); const [nodeDetail, setNodeDetail] = useState(null); const [simulationProgress, setSimulationProgress] = useState(null); const history = useHistory(); - // const data = generateRandomNodes(getRandomInt(4, 50)); const draw = () => { setShowZoomControls(false); const width = getWidth(SELECTOR); diff --git a/awx/ui/src/screens/TopologyView/utils/helpers.js b/awx/ui/src/screens/TopologyView/utils/helpers.js index cb185cbe61..f8dee9866c 100644 --- a/awx/ui/src/screens/TopologyView/utils/helpers.js +++ b/awx/ui/src/screens/TopologyView/utils/helpers.js @@ -32,7 +32,7 @@ export function renderNodeIcon(selectedNode) { return false; } -export async function redirectToDetailsPage(selectedNode, history) { +export function redirectToDetailsPage(selectedNode, history) { const { id: nodeId } = selectedNode; const constructedURL = `/instances/${nodeId}/details`; history.push(constructedURL); From 80947e2b32e85dab4f5e25d2406eb01a12277625 Mon Sep 17 00:00:00 2001 From: kurokobo <2920259+kurokobo@users.noreply.github.com> Date: Wed, 9 Mar 2022 22:32:38 +0900 Subject: [PATCH 098/125] fix: add OPERATOR_VERSION as build-arg to pass the version to operator --- 
.github/workflows/stage.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/stage.yml b/.github/workflows/stage.yml index 64f4e48ac7..fd319fa6d8 100644 --- a/.github/workflows/stage.yml +++ b/.github/workflows/stage.yml @@ -83,7 +83,8 @@ jobs: - name: Build and stage awx-operator working-directory: awx-operator run: | - BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }}" \ + BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version }} \ + --build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \ IMAGE_TAG_BASE=ghcr.io/${{ github.repository_owner }}/awx-operator \ VERSION=${{ github.event.inputs.operator_version }} make docker-build docker-push From e4f0153a7de744facc073a07005c7700cfe7660b Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Wed, 9 Mar 2022 06:58:43 -0800 Subject: [PATCH 099/125] Remove import statements from web worker file. --- awx/ui/src/util/simulationWorker.js | 34 +++++++++++++++++++++++++++++ awx/ui/src/util/webWorker.js | 7 +----- 2 files changed, 35 insertions(+), 6 deletions(-) create mode 100644 awx/ui/src/util/simulationWorker.js diff --git a/awx/ui/src/util/simulationWorker.js b/awx/ui/src/util/simulationWorker.js new file mode 100644 index 0000000000..d743e2e76c --- /dev/null +++ b/awx/ui/src/util/simulationWorker.js @@ -0,0 +1,34 @@ +/* eslint-disable no-undef */ +importScripts('https://d3js.org/d3-collection.v1.min.js'); +importScripts('https://d3js.org/d3-dispatch.v1.min.js'); +importScripts('https://d3js.org/d3-quadtree.v1.min.js'); +importScripts('https://d3js.org/d3-timer.v1.min.js'); +importScripts('https://d3js.org/d3-force.v1.min.js'); + +onmessage = function calculateLayout({ data: { nodes, links } }) { + const simulation = d3 + .forceSimulation(nodes) + .force('charge', d3.forceManyBody(15).strength(-50)) + .force( + 'link', + d3.forceLink(links).id((d) => d.hostname) + ) + .force('collide', d3.forceCollide(62)) + .force('forceX', 
d3.forceX(0)) + .force('forceY', d3.forceY(0)) + .stop(); + + for ( + let i = 0, + n = Math.ceil( + Math.log(simulation.alphaMin()) / Math.log(1 - simulation.alphaDecay()) + ); + i < n; + ++i + ) { + postMessage({ type: 'tick', progress: i / n }); + simulation.tick(); + } + + postMessage({ type: 'end', nodes, links }); +}; diff --git a/awx/ui/src/util/webWorker.js b/awx/ui/src/util/webWorker.js index 64c2eac037..7babb68f38 100644 --- a/awx/ui/src/util/webWorker.js +++ b/awx/ui/src/util/webWorker.js @@ -1,8 +1,3 @@ export default function webWorker() { - return new Worker( - new URL( - 'screens/TopologyView/utils/workers/simulationWorker.js', - import.meta.url - ) - ); + return new Worker(new URL('./simulationWorker.js', import.meta.url)); } From 4b7e3620cab833ef607fc82a0170f4bda0a02628 Mon Sep 17 00:00:00 2001 From: Marliana Lara Date: Wed, 9 Mar 2022 10:25:21 -0500 Subject: [PATCH 100/125] Add deleted details to job detail view --- awx/ui/src/screens/Job/JobDetail/JobDetail.js | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/awx/ui/src/screens/Job/JobDetail/JobDetail.js b/awx/ui/src/screens/Job/JobDetail/JobDetail.js index 1c9d7cac90..76e8329bf4 100644 --- a/awx/ui/src/screens/Job/JobDetail/JobDetail.js +++ b/awx/ui/src/screens/Job/JobDetail/JobDetail.js @@ -9,6 +9,7 @@ import styled from 'styled-components'; import { useConfig } from 'contexts/Config'; import AlertModal from 'components/AlertModal'; import { + DeletedDetail, DetailList, Detail, UserDateDetail, @@ -158,7 +159,7 @@ function JobDetail({ job, inventorySourceLabels }) { value={jobTypes[job.type]} /> - {inventory && ( + {inventory ? ( } /> + ) : ( + )} {inventory_source && ( <> @@ -215,7 +218,7 @@ function JobDetail({ job, inventorySourceLabels }) { } /> )} - {project && ( + {project ? ( <> + ) : ( + )} {scmBranch && ( Date: Wed, 9 Mar 2022 07:36:04 -0800 Subject: [PATCH 101/125] Add data-cy to content loader; move simulatioWorker to /util directory. 
--- .../screens/TopologyView/ContentLoading.js | 2 +- .../utils/workers/simulationWorker.js | 35 ------------------- 2 files changed, 1 insertion(+), 36 deletions(-) delete mode 100644 awx/ui/src/screens/TopologyView/utils/workers/simulationWorker.js diff --git a/awx/ui/src/screens/TopologyView/ContentLoading.js b/awx/ui/src/screens/TopologyView/ContentLoading.js index c4b07bf9f0..656edfe505 100644 --- a/awx/ui/src/screens/TopologyView/ContentLoading.js +++ b/awx/ui/src/screens/TopologyView/ContentLoading.js @@ -24,7 +24,7 @@ const TopologyIcon = styled(PFTopologyIcon)` `; const ContentLoading = ({ className, progress }) => ( - + d.hostname) - ) - .force('collide', d3.forceCollide(MESH_FORCE_LAYOUT.defaultCollisionFactor)) - .force('forceX', d3.forceX(MESH_FORCE_LAYOUT.defaultForceX)) - .force('forceY', d3.forceY(MESH_FORCE_LAYOUT.defaultForceY)) - .stop(); - - for ( - let i = 0, - n = Math.ceil( - Math.log(simulation.alphaMin()) / Math.log(1 - simulation.alphaDecay()) - ); - i < n; - ++i - ) { - postMessage({ type: 'tick', progress: i / n }); - simulation.tick(); - } - - postMessage({ type: 'end', nodes, links }); -}; From 53185a4ea56a36fe04f2d1cc07bfb965a054c137 Mon Sep 17 00:00:00 2001 From: nixocio Date: Fri, 4 Mar 2022 11:10:38 -0500 Subject: [PATCH 102/125] Add validation for hop nodes Add validation for hop nodes See: https://github.com/ansible/awx/issues/11622 --- awx/api/serializers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index ff8e654f55..0a7e04ae12 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -4850,6 +4850,11 @@ class InstanceSerializer(BaseSerializer): else: return float("{0:.2f}".format(((float(obj.capacity) - float(obj.consumed_capacity)) / (float(obj.capacity))) * 100)) + def validate(self, attrs): + if self.instance.node_type == 'hop': + raise serializers.ValidationError(_('Hop node instances may not be changed.')) + return attrs + class 
InstanceHealthCheckSerializer(BaseSerializer): class Meta: From 99bbc347ec2678f08d2ec56007128c13e7c539b8 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Wed, 9 Mar 2022 13:21:32 -0500 Subject: [PATCH 103/125] Fill in `errors` for hop nodes when Last Seen is out of date, and clear them when not (#11714) * Process unresponsive and newly responsive hop nodes * Use more natural way to zero hop node capacity, add test * Use warning as opposed to warn for log messages --- awx/main/models/ha.py | 20 +++++++++++------- awx/main/tasks/system.py | 23 +++++++-------------- awx/main/tests/functional/test_instances.py | 17 +++++++++++++++ 3 files changed, 38 insertions(+), 22 deletions(-) diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index add2564015..08d95bf86a 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -233,13 +233,19 @@ class Instance(HasPolicyEditsMixin, BaseModel): def refresh_capacity_fields(self): """Update derived capacity fields from cpu and memory (no save)""" - self.cpu_capacity = get_cpu_effective_capacity(self.cpu) - self.mem_capacity = get_mem_effective_capacity(self.memory) + if self.node_type == 'hop': + self.cpu_capacity = 0 + self.mem_capacity = 0 # formula has a non-zero offset, so we make sure it is 0 for hop nodes + else: + self.cpu_capacity = get_cpu_effective_capacity(self.cpu) + self.mem_capacity = get_mem_effective_capacity(self.memory) self.set_capacity_value() - def save_health_data(self, version, cpu, memory, uuid=None, update_last_seen=False, errors=''): - self.last_health_check = now() - update_fields = ['last_health_check'] + def save_health_data(self, version=None, cpu=0, memory=0, uuid=None, update_last_seen=False, errors=''): + update_fields = ['errors'] + if self.node_type != 'hop': + self.last_health_check = now() + update_fields.append('last_health_check') if update_last_seen: self.last_seen = self.last_health_check @@ -251,7 +257,7 @@ class Instance(HasPolicyEditsMixin, BaseModel): self.uuid = uuid 
update_fields.append('uuid') - if self.version != version: + if version is not None and self.version != version: self.version = version update_fields.append('version') @@ -270,7 +276,7 @@ class Instance(HasPolicyEditsMixin, BaseModel): self.errors = '' else: self.mark_offline(perform_save=False, errors=errors) - update_fields.extend(['cpu_capacity', 'mem_capacity', 'capacity', 'errors']) + update_fields.extend(['cpu_capacity', 'mem_capacity', 'capacity']) # disabling activity stream will avoid extra queries, which is important for heatbeat actions from awx.main.signals import disable_activity_stream diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index 43ac6c2b26..af02d3de55 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -436,7 +436,6 @@ def inspect_execution_nodes(instance_list): workers = mesh_status['Advertisements'] for ad in workers: hostname = ad['NodeID'] - changed = False if hostname in node_lookup: instance = node_lookup[hostname] @@ -458,11 +457,11 @@ def inspect_execution_nodes(instance_list): # Only execution nodes should be dealt with by execution_node_health_check if instance.node_type == 'hop': + logger.warning(f'Hop node {hostname}, has rejoined the receptor mesh') + instance.save_health_data(errors='') continue - if changed: - execution_node_health_check.apply_async([hostname]) - elif was_lost: + if was_lost: # if the instance *was* lost, but has appeared again, # attempt to re-establish the initial capacity and version # check @@ -534,20 +533,14 @@ def cluster_node_heartbeat(): except Exception: logger.exception('failed to reap jobs for {}'.format(other_inst.hostname)) try: - # Capacity could already be 0 because: - # * It's a new node and it never had a heartbeat - # * It was set to 0 by another tower node running this method - # * It was set to 0 by this node, but auto deprovisioning is off - # - # If auto deprovisioning is on, don't bother setting the capacity to 0 - # since we will delete the node 
anyway. - if other_inst.capacity != 0 and not settings.AWX_AUTO_DEPROVISION_INSTANCES: - other_inst.mark_offline(errors=_('Another cluster node has determined this instance to be unresponsive')) - logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen)) - elif settings.AWX_AUTO_DEPROVISION_INSTANCES: + if settings.AWX_AUTO_DEPROVISION_INSTANCES: deprovision_hostname = other_inst.hostname other_inst.delete() logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname)) + elif other_inst.capacity != 0 or (not other_inst.errors): + other_inst.mark_offline(errors=_('Another cluster node has determined this instance to be unresponsive')) + logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen)) + except DatabaseError as e: if 'did not affect any rows' in str(e): logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname)) diff --git a/awx/main/tests/functional/test_instances.py b/awx/main/tests/functional/test_instances.py index 81771a7253..39afa7dd32 100644 --- a/awx/main/tests/functional/test_instances.py +++ b/awx/main/tests/functional/test_instances.py @@ -363,6 +363,23 @@ def test_health_check_oh_no(): assert instance.errors == 'This it not a real instance!' +@pytest.mark.django_db +def test_errors_field_alone(): + instance = Instance.objects.create(hostname='foo-1', enabled=True, node_type='hop') + + instance.save_health_data(errors='Node went missing!') + assert instance.errors == 'Node went missing!' 
+ assert instance.capacity == 0 + assert instance.memory == instance.mem_capacity == 0 + assert instance.cpu == instance.cpu_capacity == 0 + + instance.save_health_data(errors='') + assert not instance.errors + assert instance.capacity == 0 + assert instance.memory == instance.mem_capacity == 0 + assert instance.cpu == instance.cpu_capacity == 0 + + @pytest.mark.django_db class TestInstanceGroupOrdering: def test_ad_hoc_instance_groups(self, instance_group_factory, inventory, default_instance_group): From f52ef6e9677b01c111b012a8725da43a2580d8f1 Mon Sep 17 00:00:00 2001 From: Alex Corey Date: Mon, 7 Feb 2022 13:47:29 -0500 Subject: [PATCH 104/125] Fixes case sensitive host count --- awx/main/managers.py | 4 ++-- awx/main/tests/functional/models/test_inventory.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/awx/main/managers.py b/awx/main/managers.py index 2614193fe1..404745b995 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -6,7 +6,7 @@ import logging import os from django.db import models from django.conf import settings - +from django.db.models.functions import Lower from awx.main.utils.filters import SmartFilter from awx.main.utils.pglock import advisory_lock from awx.main.utils.common import get_capacity_type @@ -35,7 +35,7 @@ class HostManager(models.Manager): - Only consider results that are unique - Return the count of this query """ - return self.order_by().exclude(inventory_sources__source='controller').values('name').distinct().count() + return self.order_by().exclude(inventory_sources__source='controller').values(name_lower=Lower('name')).distinct().count() def org_active_count(self, org_id): """Return count of active, unique hosts used by an organization. 
diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py index 40620fd0a3..6c418e5b16 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -110,6 +110,16 @@ class TestActiveCount: source.hosts.create(name='remotely-managed-host', inventory=inventory) assert Host.objects.active_count() == 1 + def test_host_case_insensitivity(self, organization): + inv1 = Inventory.objects.create(name='inv1', organization=organization) + inv2 = Inventory.objects.create(name='inv2', organization=organization) + assert Host.objects.active_count() == 0 + inv1.hosts.create(name='host1') + inv2.hosts.create(name='Host1') + assert Host.objects.active_count() == 1 + inv1.hosts.create(name='host2') + assert Host.objects.active_count() == 2 + @pytest.mark.django_db class TestSCMUpdateFeatures: From 4de27117e82be7b3e318057d5119d0c2dfdfbd6c Mon Sep 17 00:00:00 2001 From: John Westcott IV <32551173+john-westcott-iv@users.noreply.github.com> Date: Thu, 10 Mar 2022 10:29:04 -0500 Subject: [PATCH 105/125] Adding option to enable and configure an OpenLDAP server next to AWX (#11843) --- Makefile | 5 +- tools/docker-compose/README.md | 37 ++++++++ tools/docker-compose/ansible/plumb_ldap.yml | 32 +++++++ .../ansible/roles/sources/defaults/main.yml | 9 ++ .../ansible/roles/sources/files/ldap.ldif | 86 +++++++++++++++++++ .../ansible/roles/sources/tasks/ldap.yml | 18 ++++ .../ansible/roles/sources/tasks/main.yml | 4 + .../sources/templates/docker-compose.yml.j2 | 28 ++++++ .../ansible/templates/ldap_settings.json.j2 | 52 +++++++++++ 9 files changed, 270 insertions(+), 1 deletion(-) create mode 100644 tools/docker-compose/ansible/plumb_ldap.yml create mode 100644 tools/docker-compose/ansible/roles/sources/files/ldap.ldif create mode 100644 tools/docker-compose/ansible/roles/sources/tasks/ldap.yml create mode 100644 tools/docker-compose/ansible/templates/ldap_settings.json.j2 
diff --git a/Makefile b/Makefile index 968db81d57..bc4a6589f8 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,8 @@ COMPOSE_TAG ?= $(GIT_BRANCH) MAIN_NODE_TYPE ?= hybrid # If set to true docker-compose will also start a keycloak instance KEYCLOAK ?= false +# If set to true docker-compose will also start an ldap instance +LDAP ?= false VENV_BASE ?= /var/lib/awx/venv @@ -462,7 +464,8 @@ docker-compose-sources: .git/hooks/pre-commit -e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \ -e execution_node_count=$(EXECUTION_NODE_COUNT) \ -e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \ - -e enable_keycloak=$(KEYCLOAK) + -e enable_keycloak=$(KEYCLOAK) \ + -e enable_ldap=$(LDAP) docker-compose: awx/projects docker-compose-sources diff --git a/tools/docker-compose/README.md b/tools/docker-compose/README.md index 885faf1b06..c58d11b3f7 100644 --- a/tools/docker-compose/README.md +++ b/tools/docker-compose/README.md @@ -244,6 +244,7 @@ $ make docker-compose - [Start a Cluster](#start-a-cluster) - [Start with Minikube](#start-with-minikube) - [Keycloak Integration](#keycloak-integration) +- [OpenLDAP Integration](#openldap-integration) ### Start a Shell @@ -390,3 +391,39 @@ Once the playbook is done running SAML should now be setup in your development e 3. awx_auditor:audit123 The first account is a normal user. The second account has the attribute is_superuser set in Keycloak so will be a super user in AWX. The third account has the is_system_auditor attribute in Keycloak so it will be a system auditor in AWX. To log in with one of these Keycloak users go to the AWX login screen and click the small "Sign In With SAML Keycloak" button at the bottom of the login box. + +### OpenLDAP Integration + +OpenLDAP is an LDAP provider that can be used to test AWX with LDAP integration. This section describes how to build a reference OpenLDAP instance and plumb it with your AWX for testing purposes. 
+
+First, be sure that you have the awx.awx collection installed by running `make install_collection`.
+
+Anytime you want to run an OpenLDAP instance alongside AWX we can start docker-compose with the LDAP option to get an LDAP instance with the command:
+```bash
+LDAP=true make docker-compose
+```
+
+Once the containers come up two new ports (389, 636) should be exposed and the LDAP server should be running on those ports. The first port (389) is non-SSL and the second port (636) is SSL enabled.
+
+Now we are ready to configure and plumb OpenLDAP with AWX. To do this we have provided a playbook which will:
+* Backup and configure the LDAP adapter in AWX. NOTE: this will back up your existing settings but the password fields cannot be backed up through the API, you need a DB backup to recover this.
+
+Note: The default configuration will utilize the non-tls connection. If you want to use the tls configuration you will need to work through TLS negotiation issues because the LDAP server is using a self signed certificate.
+
+Before we can run the playbook we need to understand that LDAP will be communicated to from within the AWX container. Because of this, we have to tell AWX how to route traffic to the LDAP container through the `LDAP Server URI` settings. The playbook requires a variable called container_reference to be set. The container_reference variable needs to be how your AWX container will be able to talk to the LDAP container. See the SAML section for some examples for how to select a `container_reference`.
+
+Once you have your container reference you can run the playbook like:
+```bash
+export CONTROLLER_USERNAME=
+export CONTROLLER_PASSWORD=
+ansible-playbook tools/docker-compose/ansible/plumb_ldap.yml -e container_reference=
+```
+
+
+Once the playbook is done running LDAP should now be set up in your development environment. The LDAP server has four users with the following usernames/passwords:
+1. awx_ldap_unpriv:unpriv123
+2. awx_ldap_admin:admin123
+3. 
awx_ldap_auditor:audit123 +4. awx_ldap_org_admin:orgadmin123 + +The first account is a normal user. The second account will be a super user in AWX. The third account will be a system auditor in AWX. The fourth account is an org admin. All users belong to an org called "LDAP Organization". To log in with one of these users go to the AWX login screen enter the username/password. diff --git a/tools/docker-compose/ansible/plumb_ldap.yml b/tools/docker-compose/ansible/plumb_ldap.yml new file mode 100644 index 0000000000..061f450804 --- /dev/null +++ b/tools/docker-compose/ansible/plumb_ldap.yml @@ -0,0 +1,32 @@ +--- +- name: Plumb an ldap instance + hosts: localhost + connection: local + gather_facts: False + vars: + awx_host: "https://localhost:8043" + tasks: + - name: Load existing and new LDAP settings + set_fact: + existing_ldap: "{{ lookup('awx.awx.controller_api', 'settings/ldap', host=awx_host, verify_ssl=false) }}" + new_ldap: "{{ lookup('template', 'ldap_settings.json.j2') }}" + + - name: Display existing LDAP configuration + debug: + msg: + - "Here is your existing LDAP configuration for reference:" + - "{{ existing_ldap }}" + + - pause: + prompt: "Continuing to run this will replace your existing ldap settings (displayed above). They will all be captured. 
Be sure that is backed up before continuing" + + - name: Write out the existing content + copy: + dest: "../_sources/existing_ldap_adapter_settings.json" + content: "{{ existing_ldap }}" + + - name: Configure AWX LDAP adapter + awx.awx.settings: + settings: "{{ new_ldap }}" + controller_host: "{{ awx_host }}" + validate_certs: False diff --git a/tools/docker-compose/ansible/roles/sources/defaults/main.yml b/tools/docker-compose/ansible/roles/sources/defaults/main.yml index df035384c7..c0cdb12b7f 100644 --- a/tools/docker-compose/ansible/roles/sources/defaults/main.yml +++ b/tools/docker-compose/ansible/roles/sources/defaults/main.yml @@ -18,3 +18,12 @@ work_sign_private_keyfile: "{{ work_sign_key_dir }}/work_private_key.pem" work_sign_public_keyfile: "{{ work_sign_key_dir }}/work_public_key.pem" enable_keycloak: false + +enable_ldap: false +ldap_public_key_file_name: 'ldap.cert' +ldap_private_key_file_name: 'ldap.key' +ldap_cert_dir: '{{ sources_dest }}/ldap_certs' +ldap_diff_dir: '{{ sources_dest }}/ldap_diffs' +ldap_public_key_file: '{{ ldap_cert_dir }}/{{ ldap_public_key_file_name }}' +ldap_private_key_file: '{{ ldap_cert_dir }}/{{ ldap_private_key_file_name }}' +ldap_cert_subject: "/C=US/ST=NC/L=Durham/O=awx/CN=" diff --git a/tools/docker-compose/ansible/roles/sources/files/ldap.ldif b/tools/docker-compose/ansible/roles/sources/files/ldap.ldif new file mode 100644 index 0000000000..4812fff01e --- /dev/null +++ b/tools/docker-compose/ansible/roles/sources/files/ldap.ldif @@ -0,0 +1,86 @@ +dn: dc=example,dc=org +objectClass: dcObject +objectClass: organization +dc: example +o: example + +dn: ou=users,dc=example,dc=org +ou: users +objectClass: organizationalUnit + +dn: cn=awx_ldap_admin,ou=users,dc=example,dc=org +mail: admin@example.org +sn: LdapAdmin +cn: awx_ldap_admin +objectClass: top +objectClass: person +objectClass: organizationalPerson +objectClass: inetOrgPerson +userPassword: admin123 +givenName: awx + +dn: cn=awx_ldap_auditor,ou=users,dc=example,dc=org 
+mail: auditor@example.org +sn: LdapAuditor +cn: awx_ldap_auditor +objectClass: top +objectClass: person +objectClass: organizationalPerson +objectClass: inetOrgPerson +userPassword: audit123 +givenName: awx + +dn: cn=awx_ldap_unpriv,ou=users,dc=example,dc=org +mail: unpriv@example.org +sn: LdapUnpriv +cn: awx_ldap_unpriv +objectClass: top +objectClass: person +objectClass: organizationalPerson +objectClass: inetOrgPerson +givenName: awx +userPassword: unpriv123 + +dn: ou=groups,dc=example,dc=org +ou: groups +objectClass: top +objectClass: organizationalUnit + +dn: cn=awx_users,ou=groups,dc=example,dc=org +cn: awx_users +objectClass: top +objectClass: groupOfNames +member: cn=awx_ldap_admin,ou=users,dc=example,dc=org +member: cn=awx_ldap_auditor,ou=users,dc=example,dc=org +member: cn=awx_ldap_unpriv,ou=users,dc=example,dc=org +member: cn=awx_ldap_org_admin,ou=users,dc=example,dc=org + +dn: cn=awx_admins,ou=groups,dc=example,dc=org +cn: awx_admins +objectClass: top +objectClass: groupOfNames +member: cn=awx_ldap_admin,ou=users,dc=example,dc=org + +dn: cn=awx_auditors,ou=groups,dc=example,dc=org +cn: awx_auditors +objectClass: top +objectClass: groupOfNames +member: cn=awx_ldap_auditor,ou=users,dc=example,dc=org + +dn: cn=awx_ldap_org_admin,ou=users,dc=example,dc=org +mail: org.admin@example.org +sn: LdapOrgAdmin +cn: awx_ldap_org_admin +objectClass: top +objectClass: person +objectClass: organizationalPerson +objectClass: inetOrgPerson +givenName: awx +userPassword: orgadmin123 + +dn: cn=awx_org_admins,ou=groups,dc=example,dc=org +cn: awx_org_admins +objectClass: top +objectClass: groupOfNames +member: cn=awx_ldap_org_admin,ou=users,dc=example,dc=org + diff --git a/tools/docker-compose/ansible/roles/sources/tasks/ldap.yml b/tools/docker-compose/ansible/roles/sources/tasks/ldap.yml new file mode 100644 index 0000000000..ea46ec3afa --- /dev/null +++ b/tools/docker-compose/ansible/roles/sources/tasks/ldap.yml @@ -0,0 +1,18 @@ +--- +- name: Create LDAP cert directory + 
file: + path: "{{ item }}" + state: directory + loop: + - "{{ ldap_cert_dir }}" + - "{{ ldap_diff_dir }}" + +- name: General LDAP cert + command: 'openssl req -new -x509 -days 365 -nodes -out {{ ldap_public_key_file }} -keyout {{ ldap_private_key_file }} -subj "{{ ldap_cert_subject }}"' + args: + creates: "{{ ldap_public_key_file }}" + +- name: Copy ldap.diff + copy: + src: "ldap.ldif" + dest: "{{ ldap_diff_dir }}/ldap.ldif" diff --git a/tools/docker-compose/ansible/roles/sources/tasks/main.yml b/tools/docker-compose/ansible/roles/sources/tasks/main.yml index c7771d6b74..05b5b9facf 100644 --- a/tools/docker-compose/ansible/roles/sources/tasks/main.yml +++ b/tools/docker-compose/ansible/roles/sources/tasks/main.yml @@ -91,6 +91,10 @@ args: creates: "{{ work_sign_public_keyfile }}" +- name: Include LDAP tasks if enabled + include_tasks: ldap.yml + when: enable_ldap | bool + - name: Render Docker-Compose template: src: docker-compose.yml.j2 diff --git a/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 b/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 index eaf2c20efe..0406b291cc 100644 --- a/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 +++ b/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 @@ -99,6 +99,29 @@ services: DB_PASSWORD: {{ pg_password }} depends_on: - postgres +{% endif %} +{% if enable_ldap|bool %} + ldap: + image: bitnami/openldap:2 + container_name: tools_ldap_1 + hostname: ldap + user: "{{ ansible_user_uid }}" + ports: + - "389:1389" + - "636:1636" + environment: + LDAP_ADMIN_USERNAME: admin + LDAP_ADMIN_PASSWORD: admin + LDAP_CUSTOM_LDIF_DIR: /opt/bitnami/openldap/ldiffs + LDAP_ENABLE_TLS: "yes" + LDAP_LDAPS_PORT_NUMBER: 1636 + LDAP_TLS_CERT_FILE: /opt/bitnami/openldap/certs/{{ ldap_public_key_file_name }} + LDAP_TLS_CA_FILE: /opt/bitnami/openldap/certs/{{ ldap_public_key_file_name }} + LDAP_TLS_KEY_FILE: /opt/bitnami/openldap/certs/{{ 
ldap_private_key_file_name }} + volumes: + - 'openldap_data:/bitnami/openldap' + - '../../docker-compose/_sources/ldap_certs:/opt/bitnami/openldap/certs' + - '../../docker-compose/_sources/ldap_diffs:/opt/bitnami/openldap/ldiffs' {% endif %} # A useful container that simply passes through log messages to the console # helpful for testing awx/tower logging @@ -157,6 +180,11 @@ volumes: redis_socket_{{ container_postfix }}: name: tools_redis_socket_{{ container_postfix }} {% endfor -%} +{% if enable_ldap %} + openldap_data: + name: tools_ldap_1 + driver: local +{% endif %} {% if minikube_container_group|bool %} networks: default: diff --git a/tools/docker-compose/ansible/templates/ldap_settings.json.j2 b/tools/docker-compose/ansible/templates/ldap_settings.json.j2 new file mode 100644 index 0000000000..8dc07b2c88 --- /dev/null +++ b/tools/docker-compose/ansible/templates/ldap_settings.json.j2 @@ -0,0 +1,52 @@ +{ + "AUTH_LDAP_1_SERVER_URI": "ldap://{{ container_reference }}:389", + "AUTH_LDAP_1_BIND_DN": "cn=admin,dc=example,dc=org", + "AUTH_LDAP_1_BIND_PASSWORD": "admin", + "AUTH_LDAP_1_START_TLS": false, + "AUTH_LDAP_1_CONNECTION_OPTIONS": { + "OPT_REFERRALS": 0, + "OPT_NETWORK_TIMEOUT": 30 + }, + "AUTH_LDAP_1_USER_SEARCH": [ + "ou=users,dc=example,dc=org", + "SCOPE_SUBTREE", + "(cn=%(user)s)" + ], + "AUTH_LDAP_1_USER_DN_TEMPLATE": "cn=%(user)s,ou=users,dc=example,dc=org", + "AUTH_LDAP_1_USER_ATTR_MAP": { + "first_name": "givenName", + "last_name": "sn", + "email": "mail" + }, + "AUTH_LDAP_1_GROUP_SEARCH": [ + "ou=groups,dc=example,dc=org", + "SCOPE_SUBTREE", + "(objectClass=groupOfNames)" + ], + "AUTH_LDAP_1_GROUP_TYPE": "MemberDNGroupType", + "AUTH_LDAP_1_GROUP_TYPE_PARAMS": { + "member_attr": "member", + "name_attr": "cn" + }, + "AUTH_LDAP_1_REQUIRE_GROUP": "cn=awx_users,ou=groups,dc=example,dc=org", + "AUTH_LDAP_1_DENY_GROUP": null, + "AUTH_LDAP_1_USER_FLAGS_BY_GROUP": { + "is_superuser": [ + "cn=awx_admins,ou=groups,dc=example,dc=org" + ], + 
"is_system_auditor": [ + "cn=awx_auditors,ou=groups,dc=example,dc=org" + ] + }, + "AUTH_LDAP_1_ORGANIZATION_MAP": { + "LDAP Organization": { + "users": true, + "remove_admins": false, + "remove_users": true, + "admins": [ + "cn=awx_org_admins,ou=groups,dc=example,dc=org" + ] + } + }, + "AUTH_LDAP_1_TEAM_MAP": {} +} From bc783b8f941bc7f504327983ff39318e384d661d Mon Sep 17 00:00:00 2001 From: John Westcott IV <32551173+john-westcott-iv@users.noreply.github.com> Date: Thu, 10 Mar 2022 13:49:18 -0500 Subject: [PATCH 106/125] Enabling export of applications through awxkit (#11887) --- awx/api/serializers.py | 6 ++++++ awxkit/awxkit/api/pages/api.py | 1 + awxkit/awxkit/api/pages/applications.py | 1 + 3 files changed, 8 insertions(+) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index ff8e654f55..a5d6e171e9 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -1263,6 +1263,12 @@ class OAuth2ApplicationSerializer(BaseSerializer): activity_stream=self.reverse('api:o_auth2_application_activity_stream_list', kwargs={'pk': obj.pk}), ) ) + if obj.organization_id: + res.update( + dict( + organization=self.reverse('api:organization_detail', kwargs={'pk': obj.organization_id}), + ) + ) return res def get_modified(self, obj): diff --git a/awxkit/awxkit/api/pages/api.py b/awxkit/awxkit/api/pages/api.py index 7ce4b7a042..b2c7a462ad 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -25,6 +25,7 @@ EXPORTABLE_RESOURCES = [ 'job_templates', 'workflow_job_templates', 'execution_environments', + 'applications', ] diff --git a/awxkit/awxkit/api/pages/applications.py b/awxkit/awxkit/api/pages/applications.py index 18737cd883..a3ea0d29bd 100644 --- a/awxkit/awxkit/api/pages/applications.py +++ b/awxkit/awxkit/api/pages/applications.py @@ -10,6 +10,7 @@ from . 
import base class OAuth2Application(HasCreate, base.Base): dependencies = [Organization] + NATURAL_KEY = ('organization', 'name') def payload(self, **kwargs): payload = PseudoNamespace( From d4a4ba7fdb9fab2acbf19eeff05b72c121d62715 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Fri, 11 Mar 2022 11:08:04 -0500 Subject: [PATCH 107/125] Move location of AWX_ISOLATION_SHOW_PATHS so it is editable --- awx/settings/production.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/awx/settings/production.py b/awx/settings/production.py index 9f480a188a..d4a40ef5d4 100644 --- a/awx/settings/production.py +++ b/awx/settings/production.py @@ -39,8 +39,20 @@ BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv") # Base virtualenv paths and enablement AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx") +# Very important that this is editable (not read_only) in the API +AWX_ISOLATION_SHOW_PATHS = [ + '/etc/pki/ca-trust:/etc/pki/ca-trust:O', + '/usr/share/pki:/usr/share/pki:O', +] + # Store a snapshot of default settings at this point before loading any # customizable config files. +# +############################################################################################### +# +# Any settings defined after this point will be marked as as a read_only database setting +# +################################################################################################ DEFAULTS_SNAPSHOT = {} this_module = sys.modules[__name__] for setting in dir(this_module): @@ -91,8 +103,3 @@ except IOError: DATABASES.setdefault('default', dict()).setdefault('OPTIONS', dict()).setdefault( 'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63] ) # noqa - -AWX_ISOLATION_SHOW_PATHS = [ - '/etc/pki/ca-trust:/etc/pki/ca-trust:O', - '/usr/share/pki:/usr/share/pki:O', -] From 2c073ae488bfe38dc5102f687cac7767ed749e05 Mon Sep 17 00:00:00 2001 From: Kia Lam Date: Fri, 11 Mar 2022 08:50:28 -0800 Subject: [PATCH 108/125] Add d3 to CSP. 
--- awx/ui/public/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/public/index.html b/awx/ui/public/index.html index 301f9bf209..67c11376ac 100644 --- a/awx/ui/public/index.html +++ b/awx/ui/public/index.html @@ -24,7 +24,7 @@ <% } else { %> From 5be901c0443947c0c9e78b8cb545b954e37ec8f2 Mon Sep 17 00:00:00 2001 From: Tiago Date: Fri, 11 Mar 2022 15:51:35 -0300 Subject: [PATCH 109/125] fix CSP rules --- awx/ui/public/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/public/index.html b/awx/ui/public/index.html index 67c11376ac..c3200940bf 100644 --- a/awx/ui/public/index.html +++ b/awx/ui/public/index.html @@ -24,7 +24,7 @@ <% } else { %> From 7114b9fa11f9477e62a96e59a6a837796ea21df0 Mon Sep 17 00:00:00 2001 From: "Keith J. Grant" Date: Mon, 14 Mar 2022 09:29:02 -0700 Subject: [PATCH 110/125] add line wrap formatting to ErrorDetail --- awx/ui/src/components/ErrorDetail/ErrorDetail.js | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/awx/ui/src/components/ErrorDetail/ErrorDetail.js b/awx/ui/src/components/ErrorDetail/ErrorDetail.js index 81a7176998..1ecceea468 100644 --- a/awx/ui/src/components/ErrorDetail/ErrorDetail.js +++ b/awx/ui/src/components/ErrorDetail/ErrorDetail.js @@ -1,3 +1,4 @@ +import 'styled-components/macro'; import React, { useState } from 'react'; import PropTypes from 'prop-types'; import styled from 'styled-components'; @@ -68,7 +69,11 @@ function ErrorDetail({ error }) { ); }; - const renderStack = () => {error.stack}; + const renderStack = () => ( + + {error.stack} + + ); return ( Date: Wed, 16 Feb 2022 13:29:35 -0500 Subject: [PATCH 111/125] Upgrade to Django 3.2 - upgrades - Django 3.2.12 - pytz 2021.3 (from 2019.3) - oauthlib 3.2.0 (from 3.1.0) - requests-oauthlib 1.3.1 (from 1.3.0) - django-guid 3.2.1 (from 2.2.1) - django-solo 2.0.0 (from 1.1.3) - django-taggit 2.1.0 (from 1.2.0) - netaddr 0.8.0 (from 0.7.19) - pyrad 2.4 (from 2.3) - django-radius 
devel (from 1.3.3) - future devel (from 0.16.0) - django-guid, django-solo, and django-taggit are upgraded to fix the AppConfig deprecation warning. FIXME: django-guid devel has the fix, but it hasn't been released yet. - Released versions of django-radius have a hard-coded pin to future==0.16.0, which has a Python warning due to an improperly escaped character. This is fixed in future devel, so for now we are pinning to references to the git repos. - netaddr had a bunch of Python syntax and deprecation warnings --- .../{future.txt => python-future.txt} | 0 requirements/requirements.in | 5 ++-- requirements/requirements.txt | 27 ++++++++++--------- requirements/requirements_git.txt | 3 +++ 4 files changed, 19 insertions(+), 16 deletions(-) rename docs/licenses/{future.txt => python-future.txt} (100%) diff --git a/docs/licenses/future.txt b/docs/licenses/python-future.txt similarity index 100% rename from docs/licenses/future.txt rename to docs/licenses/python-future.txt diff --git a/requirements/requirements.in b/requirements/requirements.in index d7ace60d6b..48f4153086 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -9,17 +9,16 @@ cryptography>=3.2 Cython<3 # Since the bump to PyYAML 5.4.1 this is now a mandatory dep daphne distro -django==3.1.14 # see UPGRADE BLOCKERs +django==3.2.12 # see UPGRADE BLOCKERs django-auth-ldap django-cors-headers>=3.5.0 django-crum django-extensions>=2.2.9 # https://github.com/ansible/awx/pull/6441 -django-guid==2.2.1 # see https://pypi.org/project/django-guid/ for supported versions +django-guid==3.2.1 django-oauth-toolkit==1.4.1 django-polymorphic django-pglocks django-qsstats-magic -django-radius==1.3.3 # FIX auth does not work with later versions django-redis django-solo django-split-settings diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 8ce2162d25..1349f34ffd 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -86,7 +86,7 @@ 
dictdiffer==0.8.1 # via openshift distro==1.5.0 # via -r /awx_devel/requirements/requirements.in -django==3.1.14 +django==3.2.12 # via # -r /awx_devel/requirements/requirements.in # channels @@ -96,6 +96,7 @@ django==3.1.14 # django-guid # django-oauth-toolkit # django-polymorphic + # django-solo # django-taggit # djangorestframework django-auth-ldap==4.0.0 @@ -106,7 +107,7 @@ django-crum==0.7.9 # via -r /awx_devel/requirements/requirements.in django-extensions==2.2.9 # via -r /awx_devel/requirements/requirements.in -django-guid==2.2.1 +django-guid==3.2.1 # via -r /awx_devel/requirements/requirements.in django-oauth-toolkit==1.4.1 # via -r /awx_devel/requirements/requirements.in @@ -116,15 +117,14 @@ django-polymorphic==3.1.0 # via -r /awx_devel/requirements/requirements.in django-qsstats-magic==1.1.0 # via -r /awx_devel/requirements/requirements.in -django-radius==1.3.3 - # via -r /awx_devel/requirements/requirements.in + # via -r /awx_devel/requirements/requirements_git.txt django-redis==4.5.0 # via -r /awx_devel/requirements/requirements.in -django-solo==1.1.3 +django-solo==2.0.0 # via -r /awx_devel/requirements/requirements.in django-split-settings==1.0.0 # via -r /awx_devel/requirements/requirements.in -django-taggit==1.2.0 +django-taggit==2.1.0 # via -r /awx_devel/requirements/requirements.in djangorestframework==3.13.1 # via -r /awx_devel/requirements/requirements.in @@ -132,8 +132,9 @@ djangorestframework-yaml==2.0.0 # via -r /awx_devel/requirements/requirements.in docutils==0.16 # via python-daemon -future==0.16.0 - # via django-radius + # via + # -r /awx_devel/requirements/requirements_git.txt + # django-radius gitdb==4.0.2 # via gitpython gitpython==3.1.7 @@ -214,9 +215,9 @@ multidict==4.7.5 # via # aiohttp # yarl -netaddr==0.7.19 +netaddr==0.8.0 # via pyrad -oauthlib==3.1.0 +oauthlib==3.2.0 # via # django-oauth-toolkit # requests-oauthlib @@ -267,7 +268,7 @@ pyparsing==2.4.6 # via # -r /awx_devel/requirements/requirements.in # packaging -pyrad==2.3 
+pyrad==2.4 # via django-radius pyrsistent==0.15.7 # via jsonschema @@ -292,7 +293,7 @@ python3-openid==3.1.0 # via social-auth-core python3-saml==1.13.0 # via -r /awx_devel/requirements/requirements.in -pytz==2019.3 +pytz==2021.3 # via # django # djangorestframework @@ -325,7 +326,7 @@ requests==2.26.0 # requests-oauthlib # social-auth-core # twilio -requests-oauthlib==1.3.0 +requests-oauthlib==1.3.1 # via # kubernetes # msrest diff --git a/requirements/requirements_git.txt b/requirements/requirements_git.txt index 01b668ee18..ab13d9aed2 100644 --- a/requirements/requirements_git.txt +++ b/requirements/requirements_git.txt @@ -1,3 +1,6 @@ git+https://github.com/ansible/system-certifi.git@devel#egg=certifi # Remove pbr from requirements.in when moving ansible-runner to requirements.in git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner +# django-radius has an aggressive pin of future==0.16.0, see https://github.com/robgolding/django-radius/pull/25 +git+https://github.com/ansible/django-radius.git@develop#egg=django-radius +git+https://github.com/PythonCharmers/python-future@master#egg=future From 1803c5bdb43bbd431d8219f3f0829154fda73f31 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Thu, 17 Feb 2022 10:54:15 -0500 Subject: [PATCH 112/125] Fix up usage of django-guid It has replaced the class-based middleware, everything is function-based now. 
--- awx/main/dispatch/periodic.py | 5 +++-- awx/main/dispatch/pool.py | 4 ++-- awx/main/dispatch/publish.py | 4 ++-- awx/main/dispatch/worker/callback.py | 6 +++--- awx/main/dispatch/worker/task.py | 4 ++-- awx/main/tasks/callback.py | 4 ++-- awx/main/utils/filters.py | 4 ++-- awx/settings/defaults.py | 2 +- 8 files changed, 17 insertions(+), 16 deletions(-) diff --git a/awx/main/dispatch/periodic.py b/awx/main/dispatch/periodic.py index bfeff05fca..e3e7da5db9 100644 --- a/awx/main/dispatch/periodic.py +++ b/awx/main/dispatch/periodic.py @@ -6,7 +6,8 @@ from multiprocessing import Process from django.conf import settings from django.db import connections from schedule import Scheduler -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid +from django_guid.utils import generate_guid from awx.main.dispatch.worker import TaskWorker @@ -32,7 +33,7 @@ class Scheduler(Scheduler): # If the database connection has a hiccup, re-establish a new # connection conn.close_if_unusable_or_obsolete() - GuidMiddleware.set_guid(GuidMiddleware._generate_guid()) + set_guid(generate_guid()) self.run_pending() except Exception: logger.exception('encountered an error while scheduling periodic tasks') diff --git a/awx/main/dispatch/pool.py b/awx/main/dispatch/pool.py index 21b1e6b9be..576f6bf799 100644 --- a/awx/main/dispatch/pool.py +++ b/awx/main/dispatch/pool.py @@ -16,7 +16,7 @@ from queue import Full as QueueFull, Empty as QueueEmpty from django.conf import settings from django.db import connection as django_connection, connections from django.core.cache import cache as django_cache -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid from jinja2 import Template import psutil @@ -436,7 +436,7 @@ class AutoscalePool(WorkerPool): def write(self, preferred_queue, body): if 'guid' in body: - GuidMiddleware.set_guid(body['guid']) + set_guid(body['guid']) try: # when the cluster heartbeat occurs, clean up internally if 
isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']: diff --git a/awx/main/dispatch/publish.py b/awx/main/dispatch/publish.py index 63b2890e1e..e873465155 100644 --- a/awx/main/dispatch/publish.py +++ b/awx/main/dispatch/publish.py @@ -5,7 +5,7 @@ import json from uuid import uuid4 from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import get_guid from . import pg_bus_conn @@ -76,7 +76,7 @@ class task: logger.error(msg) raise ValueError(msg) obj = {'uuid': task_id, 'args': args, 'kwargs': kwargs, 'task': cls.name} - guid = GuidMiddleware.get_guid() + guid = get_guid() if guid: obj['guid'] = guid obj.update(**kw) diff --git a/awx/main/dispatch/worker/callback.py b/awx/main/dispatch/worker/callback.py index a88286364a..5026e72c06 100644 --- a/awx/main/dispatch/worker/callback.py +++ b/awx/main/dispatch/worker/callback.py @@ -9,7 +9,7 @@ from django.conf import settings from django.utils.timezone import now as tz_now from django.db import DatabaseError, OperationalError, connection as django_connection from django.db.utils import InterfaceError, InternalError -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid import psutil @@ -184,7 +184,7 @@ class CallbackBrokerWorker(BaseWorker): if body.get('event') == 'EOF': try: if 'guid' in body: - GuidMiddleware.set_guid(body['guid']) + set_guid(body['guid']) final_counter = body.get('final_counter', 0) logger.info('Event processing is finished for Job {}, sending notifications'.format(job_identifier)) # EOF events are sent when stdout for the running task is @@ -208,7 +208,7 @@ class CallbackBrokerWorker(BaseWorker): logger.exception('Worker failed to emit notifications: Job {}'.format(job_identifier)) finally: self.subsystem_metrics.inc('callback_receiver_events_in_memory', -1) - GuidMiddleware.set_guid('') + set_guid('') return skip_websocket_message = body.pop('skip_websocket_message', False) diff --git 
a/awx/main/dispatch/worker/task.py b/awx/main/dispatch/worker/task.py index 91ce7f47b4..e1fe196ddb 100644 --- a/awx/main/dispatch/worker/task.py +++ b/awx/main/dispatch/worker/task.py @@ -7,7 +7,7 @@ import traceback from kubernetes.config import kube_config from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import set_guid from awx.main.tasks.system import dispatch_startup, inform_cluster_of_shutdown @@ -54,7 +54,7 @@ class TaskWorker(BaseWorker): args = body.get('args', []) kwargs = body.get('kwargs', {}) if 'guid' in body: - GuidMiddleware.set_guid(body.pop('guid')) + set_guid(body.pop('guid')) _call = TaskWorker.resolve_callable(task) if inspect.isclass(_call): # the callable is a class, e.g., RunJob; instantiate and diff --git a/awx/main/tasks/callback.py b/awx/main/tasks/callback.py index d9b04c0b15..b1a4c450e5 100644 --- a/awx/main/tasks/callback.py +++ b/awx/main/tasks/callback.py @@ -8,7 +8,7 @@ import stat # Django from django.utils.timezone import now from django.conf import settings -from django_guid.middleware import GuidMiddleware +from django_guid import get_guid # AWX from awx.main.redact import UriCleaner @@ -25,7 +25,7 @@ class RunnerCallback: def __init__(self, model=None): self.parent_workflow_job_id = None self.host_map = {} - self.guid = GuidMiddleware.get_guid() + self.guid = get_guid() self.job_created = None self.recent_event_timings = deque(maxlen=settings.MAX_WEBSOCKET_EVENT_RATE) self.dispatcher = CallbackQueueDispatcher() diff --git a/awx/main/utils/filters.py b/awx/main/utils/filters.py index 7320cbc02f..f0d29c0d10 100644 --- a/awx/main/utils/filters.py +++ b/awx/main/utils/filters.py @@ -15,8 +15,8 @@ from django.apps import apps from django.db import models from django.conf import settings +from django_guid import get_guid from django_guid.log_filters import CorrelationId -from django_guid.middleware import GuidMiddleware from awx import MODE from awx.main.constants import 
LOGGER_BLOCKLIST @@ -366,7 +366,7 @@ class SmartFilter(object): class DefaultCorrelationId(CorrelationId): def filter(self, record): - guid = GuidMiddleware.get_guid() or '-' + guid = get_guid() or '-' if MODE == 'development': guid = guid[:8] record.guid = guid diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 4c6597181e..cab598cd81 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -935,7 +935,7 @@ AWX_CLEANUP_PATHS = True RECEPTOR_RELEASE_WORK = True MIDDLEWARE = [ - 'django_guid.middleware.GuidMiddleware', + 'django_guid.middleware.guid_middleware', 'awx.main.middleware.TimingMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'awx.main.middleware.MigrationRanCheckMiddleware', From 5d000c37d6970738db1e06c2e8368bc2ff34bb8e Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 16 Feb 2022 13:58:44 -0500 Subject: [PATCH 113/125] Deal with breaking tests for 3.2 - Instantiating an abstract model raises a TypeError --- awx/main/tests/unit/test_fields.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/awx/main/tests/unit/test_fields.py b/awx/main/tests/unit/test_fields.py index 8c00a95194..da669ae47d 100644 --- a/awx/main/tests/unit/test_fields.py +++ b/awx/main/tests/unit/test_fields.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +from unittest import mock import pytest from django.core.exceptions import ValidationError @@ -8,7 +9,7 @@ from django.db.models.fields.related_descriptors import ReverseManyToOneDescript from rest_framework.serializers import ValidationError as DRFValidationError -from awx.main.models import Credential, CredentialType, BaseModel +from awx.main.models import Credential, CredentialType from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDescriptor @@ -16,7 +17,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc 'schema, given, message', [ ( - { # immitates what the CredentialType injectors field is + 
{ # imitates what the CredentialType injectors field is "additionalProperties": False, "type": "object", "properties": {"extra_vars": {"additionalProperties": False, "type": "object"}}, @@ -25,7 +26,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc "list provided in relative path ['extra_vars'], expected dict", ), ( - { # immitates what the CredentialType injectors field is + { # imitates what the CredentialType injectors field is "additionalProperties": False, "type": "object", }, @@ -35,7 +36,7 @@ from awx.main.fields import JSONSchemaField, ImplicitRoleField, ImplicitRoleDesc ], ) def test_custom_error_messages(schema, given, message): - instance = BaseModel() + instance = mock.Mock() class MockFieldSubclass(JSONSchemaField): def schema(self, model_instance): From 38ccea0f1fc6f8479f7740a75d9514bf937474d0 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 16 Feb 2022 14:09:59 -0500 Subject: [PATCH 114/125] Fix up warnings - the default auto-increment primary key field type is now configurable, and Django's check command issues a warning if you are just assuming the historical behavior of using AutoField. - Django 3.2 brings in automatic AppConfig discovery, so all of our explicit `default_app_config = ...` assignments in __init__.py modules are no longer needed, and raise a RemovedInDjango41Warning. 
--- awx/conf/__init__.py | 2 -- awx/main/__init__.py | 2 -- awx/settings/defaults.py | 5 +++++ awx/sso/__init__.py | 2 -- awx/ui/__init__.py | 2 -- 5 files changed, 5 insertions(+), 8 deletions(-) diff --git a/awx/conf/__init__.py b/awx/conf/__init__.py index 661295a685..3580b7a45a 100644 --- a/awx/conf/__init__.py +++ b/awx/conf/__init__.py @@ -7,8 +7,6 @@ from django.utils.module_loading import autodiscover_modules # AWX from .registry import settings_registry -default_app_config = 'awx.conf.apps.ConfConfig' - def register(setting, **kwargs): settings_registry.register(setting, **kwargs) diff --git a/awx/main/__init__.py b/awx/main/__init__.py index f500f439b6..e484e62be1 100644 --- a/awx/main/__init__.py +++ b/awx/main/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. - -default_app_config = 'awx.main.apps.MainConfig' diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index cab598cd81..b11710495e 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -41,6 +41,11 @@ else: DEBUG = True SQL_DEBUG = DEBUG +# FIXME: it would be nice to cycle back around and allow this to be +# BigAutoField going forward, but we'd have to be explicit about our +# existing models. +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', diff --git a/awx/sso/__init__.py b/awx/sso/__init__.py index bb4e958844..e484e62be1 100644 --- a/awx/sso/__init__.py +++ b/awx/sso/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. - -default_app_config = 'awx.sso.apps.SSOConfig' diff --git a/awx/ui/__init__.py b/awx/ui/__init__.py index ac6a554356..e484e62be1 100644 --- a/awx/ui/__init__.py +++ b/awx/ui/__init__.py @@ -1,4 +1,2 @@ # Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. 
- -default_app_config = 'awx.ui.apps.UIConfig' From eb8a1fec4935bf4a174cdf35f23cc312f357f394 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Wed, 23 Feb 2022 10:55:21 -0500 Subject: [PATCH 115/125] Make API queries involving json fields cast the fields to text This restores compatibility with the way it previously worked -- we never supported proper json operator queries through the API. --- awx/api/filters.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/awx/api/filters.py b/awx/api/filters.py index 18539e3b85..10fc488006 100644 --- a/awx/api/filters.py +++ b/awx/api/filters.py @@ -9,8 +9,9 @@ from functools import reduce # Django from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist from django.db import models -from django.db.models import Q, CharField, IntegerField, BooleanField +from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey +from django.db.models.functions import Cast from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.fields import GenericForeignKey from django.utils.encoding import force_str @@ -241,6 +242,8 @@ class FieldLookupBackend(BaseFilterBackend): new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name)) return value, new_lookups, needs_distinct else: + if isinstance(field, JSONField): + new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt') value = self.value_to_python_for_field(field, value) return value, new_lookup, needs_distinct @@ -322,6 +325,9 @@ class FieldLookupBackend(BaseFilterBackend): value, new_key, distinct = self.value_to_python(queryset.model, key, value) if distinct: needs_distinct = True + if '_as_txt' in new_key: + fname = next(item for item in new_key.split('__') if item.endswith('_as_txt')) + queryset = queryset.annotate(**{fname: Cast(fname[:-7], 
output_field=TextField())}) if q_chain: chain_filters.append((q_not, new_key, value)) elif q_or: From 584514766d489815718dfe153aa49955d6342577 Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Fri, 4 Mar 2022 12:09:11 -0500 Subject: [PATCH 116/125] Disable our wrapping of the Django settings during migrations This will hopefully get us past the unfortunate check against the HostMetric table, which doesn't exist when you are upgrading from 3.8 to 4.x. Additionally, guard against AUTH_LDAP_GROUP_TYPE not being in settings for conf migration 0006. --- awx/conf/apps.py | 9 +++++++-- awx/conf/migrations/_ldap_group_type.py | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/awx/conf/apps.py b/awx/conf/apps.py index b865c61948..811baba262 100644 --- a/awx/conf/apps.py +++ b/awx/conf/apps.py @@ -1,3 +1,5 @@ +import sys + # Django from django.apps import AppConfig @@ -12,6 +14,9 @@ class ConfConfig(AppConfig): def ready(self): self.module.autodiscover() - from .settings import SettingsWrapper - SettingsWrapper.initialize() + if not set(sys.argv) & {'migrate', 'check_migrations'}: + + from .settings import SettingsWrapper + + SettingsWrapper.initialize() diff --git a/awx/conf/migrations/_ldap_group_type.py b/awx/conf/migrations/_ldap_group_type.py index 09caa2d28b..b6580f8cae 100644 --- a/awx/conf/migrations/_ldap_group_type.py +++ b/awx/conf/migrations/_ldap_group_type.py @@ -5,7 +5,7 @@ from django.utils.timezone import now def fill_ldap_group_type_params(apps, schema_editor): - group_type = settings.AUTH_LDAP_GROUP_TYPE + group_type = getattr(settings, 'AUTH_LDAP_GROUP_TYPE', None) Setting = apps.get_model('conf', 'Setting') group_type_params = {'name_attr': 'cn', 'member_attr': 'member'} From 574e3ed6ef2afa1e6f97b83e71be5be49624e2fc Mon Sep 17 00:00:00 2001 From: Jeff Bradberry Date: Tue, 8 Mar 2022 09:59:26 -0500 Subject: [PATCH 117/125] Fix the signature of RADIUSBackend.get_django_user to match what django-radius is now expecting. 
--- awx/sso/backends.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/sso/backends.py b/awx/sso/backends.py index 535cc9579c..e54a124560 100644 --- a/awx/sso/backends.py +++ b/awx/sso/backends.py @@ -199,7 +199,7 @@ class RADIUSBackend(BaseRADIUSBackend): if not user.has_usable_password(): return user - def get_django_user(self, username, password=None): + def get_django_user(self, username, password=None, groups=[], is_staff=False, is_superuser=False): return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius') From ce8b9750c9dee2f6b342a76d416d0df1169bff4f Mon Sep 17 00:00:00 2001 From: nixocio Date: Fri, 25 Feb 2022 12:30:22 -0500 Subject: [PATCH 118/125] Add several changes to Instance Groups Add several changes to API and UI related to Instance Groups. * Update summary_fields for DEFAULT_CONTROL_PLANE_QUEUE_NAME, and DEFAULT_EXECUTION_QUEUE_NAME. Rely on API validation for those fields. * Fix Instance Group list RBAC * Add validation for a couple of fields on the Instance Groups endpoint 1. is_container_group 2. policy_instance_percentage 3. 
policy_instance_list See: https://github.com/ansible/awx/issues/11130 Also: https://github.com/ansible/awx/issues/11718 --- awx/api/permissions.py | 9 -- awx/api/serializers.py | 15 ++++ awx/api/views/__init__.py | 2 - awx/main/access.py | 7 +- .../screens/InstanceGroup/ContainerGroup.js | 32 ++----- .../ContainerGroupAdd/ContainerGroupAdd.js | 4 +- .../ContainerGroupDetails.js | 5 +- .../ContainerGroupEdit/ContainerGroupEdit.js | 8 +- .../screens/InstanceGroup/InstanceGroup.js | 33 ++----- .../InstanceGroupAdd/InstanceGroupAdd.js | 4 +- .../InstanceGroupDetails.js | 11 +-- .../InstanceGroupEdit/InstanceGroupEdit.js | 8 +- .../InstanceGroupEdit.test.js | 26 +----- .../InstanceGroupList/InstanceGroupList.js | 86 ++----------------- .../screens/InstanceGroup/InstanceGroups.js | 30 +++---- .../InstanceGroup/InstanceGroups.test.js | 14 +++ .../shared/ContainerGroupForm.js | 27 +----- .../InstanceGroup/shared/InstanceGroupForm.js | 32 +------ .../shared/InstanceGroupForm.test.js | 40 --------- awx/ui/src/setupTests.js | 1 + 20 files changed, 80 insertions(+), 314 deletions(-) diff --git a/awx/api/permissions.py b/awx/api/permissions.py index bd6328495b..3608a23d33 100644 --- a/awx/api/permissions.py +++ b/awx/api/permissions.py @@ -4,8 +4,6 @@ # Python import logging -from django.conf import settings - # Django REST Framework from rest_framework.exceptions import MethodNotAllowed, PermissionDenied from rest_framework import permissions @@ -250,13 +248,6 @@ class IsSystemAdminOrAuditor(permissions.BasePermission): return request.user.is_superuser -class InstanceGroupTowerPermission(ModelAccessPermission): - def has_object_permission(self, request, view, obj): - if request.method == 'DELETE' and obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: - return False - return super(InstanceGroupTowerPermission, self).has_object_permission(request, view, obj) - - class WebhookKeyPermission(permissions.BasePermission): def 
has_object_permission(self, request, view, obj): return request.user.can_access(view.model, 'admin', obj, request.data) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 695f83d8d9..a4ee2ef7f0 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -4947,6 +4947,9 @@ class InstanceGroupSerializer(BaseSerializer): return res def validate_policy_instance_list(self, value): + if self.instance and self.instance.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + if self.instance.policy_instance_list != value: + raise serializers.ValidationError(_('%s instance group policy_instance_list may not be changed.' % self.instance.name)) for instance_name in value: if value.count(instance_name) > 1: raise serializers.ValidationError(_('Duplicate entry {}.').format(instance_name)) @@ -4957,6 +4960,11 @@ class InstanceGroupSerializer(BaseSerializer): return value def validate_policy_instance_percentage(self, value): + if self.instance and self.instance.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + if value != self.instance.policy_instance_percentage: + raise serializers.ValidationError( + _('%s instance group policy_instance_percentage may not be changed from the initial value set by the installer.' % self.instance.name) + ) if value and self.instance and self.instance.is_container_group: raise serializers.ValidationError(_('Containerized instances may not be managed via the API')) return value @@ -4975,6 +4983,13 @@ class InstanceGroupSerializer(BaseSerializer): return value + def validate_is_container_group(self, value): + if self.instance and self.instance.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + if value != self.instance.is_container_group: + raise serializers.ValidationError(_('%s instance group is_container_group may not be changed.' 
% self.instance.name)) + + return value + def validate_credential(self, value): if value and not value.kubernetes: raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group')) diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 163fa4e727..34e442eb5e 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -105,7 +105,6 @@ from awx.api.permissions import ( ProjectUpdatePermission, InventoryInventorySourcesUpdatePermission, UserPermission, - InstanceGroupTowerPermission, VariableDataPermission, WorkflowApprovalPermission, IsSystemAdminOrAuditor, @@ -480,7 +479,6 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP name = _("Instance Group Detail") model = models.InstanceGroup serializer_class = serializers.InstanceGroupSerializer - permission_classes = (InstanceGroupTowerPermission,) def update_raw_data(self, data): if self.get_object().is_container_group: diff --git a/awx/main/access.py b/awx/main/access.py index 06b560b9ae..c608a7aa41 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -465,7 +465,7 @@ class BaseAccess(object): if display_method == 'schedule': user_capabilities['schedule'] = user_capabilities['start'] continue - elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CredentialInputSource, ExecutionEnvironment)): + elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CredentialInputSource, ExecutionEnvironment, InstanceGroup)): user_capabilities['delete'] = user_capabilities['edit'] continue elif display_method == 'copy' and isinstance(obj, (Group, Host)): @@ -575,6 +575,11 @@ class InstanceGroupAccess(BaseAccess): def can_change(self, obj, data): return self.user.is_superuser + def can_delete(self, obj): + if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]: + return False + return self.user.is_superuser + class UserAccess(BaseAccess): 
""" diff --git a/awx/ui/src/screens/InstanceGroup/ContainerGroup.js b/awx/ui/src/screens/InstanceGroup/ContainerGroup.js index 1e1be0b88d..3533b1c97a 100644 --- a/awx/ui/src/screens/InstanceGroup/ContainerGroup.js +++ b/awx/ui/src/screens/InstanceGroup/ContainerGroup.js @@ -13,7 +13,7 @@ import { CaretLeftIcon } from '@patternfly/react-icons'; import { Card, PageSection } from '@patternfly/react-core'; import useRequest from 'hooks/useRequest'; -import { InstanceGroupsAPI, SettingsAPI } from 'api'; +import { InstanceGroupsAPI } from 'api'; import RoutedTabs from 'components/RoutedTabs'; import ContentError from 'components/ContentError'; import ContentLoading from 'components/ContentLoading'; @@ -30,28 +30,15 @@ function ContainerGroup({ setBreadcrumb }) { isLoading, error: contentError, request: fetchInstanceGroups, - result: { instanceGroup, defaultControlPlane, defaultExecution }, + result: { instanceGroup }, } = useRequest( useCallback(async () => { - const [ - { data }, - { - data: { - DEFAULT_EXECUTION_QUEUE_NAME, - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - }, - }, - ] = await Promise.all([ - InstanceGroupsAPI.readDetail(id), - SettingsAPI.readAll(), - ]); + const { data } = await InstanceGroupsAPI.readDetail(id); return { instanceGroup: data, - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, }; }, [id]), - { instanceGroup: null, defaultExecution: '' } + { instanceGroup: null } ); useEffect(() => { @@ -125,17 +112,10 @@ function ContainerGroup({ setBreadcrumb }) { {instanceGroup && ( <> - + - + )} - {name !== defaultExecution && - instanceGroup.summary_fields.user_capabilities && + {instanceGroup.summary_fields.user_capabilities && instanceGroup.summary_fields.user_capabilities.delete && ( { - const [ - { data }, - { - data: { - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - DEFAULT_EXECUTION_QUEUE_NAME, - }, - }, - ] = await Promise.all([ - InstanceGroupsAPI.readDetail(id), - SettingsAPI.readAll(), - ]); + const { data 
} = await InstanceGroupsAPI.readDetail(id); return { instanceGroup: data, - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, }; }, [id]), - { instanceGroup: null, defaultControlPlane: '', defaultExecution: '' } + { instanceGroup: null } ); useEffect(() => { @@ -133,18 +120,10 @@ function InstanceGroup({ setBreadcrumb }) { {instanceGroup && ( <> - + - + @@ -115,8 +109,7 @@ function InstanceGroupDetails({ {t`Edit`} )} - {!isDefaultInstanceGroup && - instanceGroup.summary_fields.user_capabilities && + {instanceGroup.summary_fields.user_capabilities && instanceGroup.summary_fields.user_capabilities.delete && ( ', () => { history = createMemoryHistory(); await act(async () => { wrapper = mountWithContexts( - , + , { context: { router: { history } }, } @@ -70,27 +67,6 @@ describe('', () => { jest.clearAllMocks(); }); - test('controlplane instance group name can not be updated', async () => { - let towerWrapper; - await act(async () => { - towerWrapper = mountWithContexts( - , - { - context: { router: { history } }, - } - ); - }); - expect( - towerWrapper.find('input#instance-group-name').prop('disabled') - ).toBeTruthy(); - expect( - towerWrapper.find('input#instance-group-name').prop('value') - ).toEqual('controlplane'); - }); - test('handleSubmit should call the api and redirect to details page', async () => { await act(async () => { wrapper.find('InstanceGroupForm').invoke('onSubmit')( diff --git a/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js b/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js index 8f6e39edb6..dfce293dbd 100644 --- a/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js +++ b/awx/ui/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupList.js @@ -4,7 +4,7 @@ import { useLocation, useRouteMatch, Link } from 'react-router-dom'; import { t, Plural } from '@lingui/macro'; import { Card, PageSection, DropdownItem } from 
'@patternfly/react-core'; -import { InstanceGroupsAPI, SettingsAPI } from 'api'; +import { InstanceGroupsAPI } from 'api'; import { getQSConfig, parseQueryString } from 'util/qs'; import useRequest, { useDeleteItems } from 'hooks/useRequest'; import useSelected from 'hooks/useSelected'; @@ -27,28 +27,6 @@ const QS_CONFIG = getQSConfig('instance-group', { page_size: 20, }); -function modifyInstanceGroups( - defaultControlPlane, - defaultExecution, - items = [] -) { - return items.map((item) => { - const clonedItem = { - ...item, - summary_fields: { - ...item.summary_fields, - user_capabilities: { - ...item.summary_fields.user_capabilities, - }, - }, - }; - if (clonedItem.name === (defaultControlPlane || defaultExecution)) { - clonedItem.summary_fields.user_capabilities.delete = false; - } - return clonedItem; - }); -} - function InstanceGroupList({ isKubernetes, isSettingsRequestLoading, @@ -56,30 +34,6 @@ function InstanceGroupList({ }) { const location = useLocation(); const match = useRouteMatch(); - const { - error: protectedItemsError, - isLoading: isLoadingProtectedItems, - request: fetchProtectedItems, - result: { defaultControlPlane, defaultExecution }, - } = useRequest( - useCallback(async () => { - const { - data: { - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - DEFAULT_EXECUTION_QUEUE_NAME, - }, - } = await SettingsAPI.readAll(); - return { - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, - }; - }, []), - { defaultControlPlane: '', defaultExecution: '' } - ); - - useEffect(() => { - fetchProtectedItems(); - }, [fetchProtectedItems]); const { error: contentError, @@ -127,12 +81,6 @@ function InstanceGroupList({ const { selected, isAllSelected, handleSelect, clearSelected, selectAll } = useSelected(instanceGroups); - const modifiedSelected = modifyInstanceGroups( - defaultControlPlane, - defaultExecution, - selected - ); - const { isLoading: deleteLoading, deletionError, @@ -158,28 +106,10 @@ function 
InstanceGroupList({ const canAdd = actions && actions.POST; - const cannotDelete = (item) => - !item.summary_fields.user_capabilities.delete || - item.name === defaultExecution || - item.name === defaultControlPlane; + const cannotDelete = (item) => !item.summary_fields.user_capabilities.delete; const pluralizedItemName = t`Instance Groups`; - let errorMessageDelete = ''; - const notdeletedable = selected.filter( - (i) => i.name === defaultControlPlane || i.name === defaultExecution - ); - - if (notdeletedable.length) { - errorMessageDelete = ( - - ); - } - const addContainerGroup = t`Add container group`; const addInstanceGroup = t`Add instance group`; @@ -229,14 +159,9 @@ function InstanceGroupList({ { const { - data: { - IS_K8S, - DEFAULT_CONTROL_PLANE_QUEUE_NAME, - DEFAULT_EXECUTION_QUEUE_NAME, - }, + data: { IS_K8S }, } = await SettingsAPI.readCategory('all'); return { isKubernetes: IS_K8S, - defaultControlPlane: DEFAULT_CONTROL_PLANE_QUEUE_NAME, - defaultExecution: DEFAULT_EXECUTION_QUEUE_NAME, }; }, []), - { isLoading: true } + { isKubernetes: false } ); useEffect(() => { - settingsRequest(); - }, [settingsRequest]); + userCanReadSettings && settingsRequest(); + }, [settingsRequest, userCanReadSettings]); const [breadcrumbConfig, setBreadcrumbConfig] = useState({ '/instance_groups': t`Instance Groups`, @@ -91,20 +89,14 @@ function InstanceGroups() { ) : ( - + {!isKubernetes && ( - + )} diff --git a/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js b/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js index fdd53c6e8f..84f269cc0a 100644 --- a/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js +++ b/awx/ui/src/screens/InstanceGroup/InstanceGroups.test.js @@ -2,6 +2,7 @@ import React from 'react'; import { shallow } from 'enzyme'; import { InstanceGroupsAPI } from 'api'; import InstanceGroups from './InstanceGroups'; +import { useUserProfile } from 'contexts/Config'; const mockUseLocationValue = { pathname: '', @@ -11,6 +12,19 @@ 
jest.mock('react-router-dom', () => ({ ...jest.requireActual('react-router-dom'), useLocation: () => mockUseLocationValue, })); + +beforeEach(() => { + useUserProfile.mockImplementation(() => { + return { + isSuperUser: true, + isSystemAuditor: false, + isOrgAdmin: false, + isNotificationAdmin: false, + isExecEnvAdmin: false, + }; + }); +}); + describe('', () => { test('should set breadcrumbs', () => { mockUseLocationValue.pathname = '/instance_groups'; diff --git a/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js b/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js index 41d7d60ac5..e9feed4e87 100644 --- a/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js +++ b/awx/ui/src/screens/InstanceGroup/shared/ContainerGroupForm.js @@ -11,7 +11,7 @@ import FormField, { CheckboxField, } from 'components/FormField'; import FormActionGroup from 'components/FormActionGroup'; -import { combine, required, protectedResourceName } from 'util/validators'; +import { required } from 'util/validators'; import { FormColumnLayout, FormFullWidthLayout, @@ -21,21 +21,11 @@ import { import CredentialLookup from 'components/Lookup/CredentialLookup'; import { VariablesField } from 'components/CodeEditor'; -function ContainerGroupFormFields({ - instanceGroup, - defaultControlPlane, - defaultExecution, -}) { +function ContainerGroupFormFields({ instanceGroup }) { const { setFieldValue, setFieldTouched } = useFormikContext(); const [credentialField, credentialMeta, credentialHelpers] = useField('credential'); - const [, { initialValue }] = useField('name'); - - const isProtected = - initialValue === `${defaultControlPlane}` || - initialValue === `${defaultExecution}`; - const [overrideField] = useField('override'); const handleCredentialUpdate = useCallback( @@ -50,21 +40,10 @@ function ContainerGroupFormFields({ <> ', () => { wrapper.find('button[aria-label="Cancel"]').invoke('onClick')(); expect(onCancel).toBeCalled(); }); - - test('Name field should be 
disabled, default', async () => { - let defaultInstanceGroupWrapper; - await act(async () => { - defaultInstanceGroupWrapper = mountWithContexts( - - ); - }); - expect( - defaultInstanceGroupWrapper - .find('TextInput[name="name"]') - .prop('isDisabled') - ).toBe(true); - }); - - test('Name field should be disabled, controlplane', async () => { - let defaultInstanceGroupWrapper; - await act(async () => { - defaultInstanceGroupWrapper = mountWithContexts( - - ); - }); - expect( - defaultInstanceGroupWrapper - .find('TextInput[name="name"]') - .prop('isDisabled') - ).toBe(true); - }); }); diff --git a/awx/ui/src/setupTests.js b/awx/ui/src/setupTests.js index dc7d49014d..b9d84f3803 100644 --- a/awx/ui/src/setupTests.js +++ b/awx/ui/src/setupTests.js @@ -93,6 +93,7 @@ jest.doMock('./contexts/Config', () => ({ Config: MockConfigContext.Consumer, useConfig: () => React.useContext(MockConfigContext), useAuthorizedPath: jest.fn(), + useUserProfile: jest.fn(), })); // ? From 2321f06c8a9d4da92d17895e65b496c1e39bf497 Mon Sep 17 00:00:00 2001 From: Alan Rominger Date: Fri, 11 Mar 2022 13:34:38 -0500 Subject: [PATCH 119/125] Only clear hop node errors if they were lost before, not lost now --- awx/main/tasks/system.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py index af02d3de55..927bbbee2a 100644 --- a/awx/main/tasks/system.py +++ b/awx/main/tasks/system.py @@ -457,8 +457,9 @@ def inspect_execution_nodes(instance_list): # Only execution nodes should be dealt with by execution_node_health_check if instance.node_type == 'hop': - logger.warning(f'Hop node {hostname}, has rejoined the receptor mesh') - instance.save_health_data(errors='') + if was_lost and (not instance.is_lost(ref_time=nowtime)): + logger.warning(f'Hop node {hostname}, has rejoined the receptor mesh') + instance.save_health_data(errors='') continue if was_lost: From fcb1c4823ead24b7096d9d14e6d2d489cd6573b1 Mon Sep 17 00:00:00 2001 From: 
Jamie Slome Date: Tue, 15 Mar 2022 13:59:42 +0000 Subject: [PATCH 120/125] Create SECURITY.md --- SECURITY.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..2e8815a38d --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,3 @@ +For all security related bugs, email security@ansible.com instead of using this issue tracker and you will receive a prompt response. + +For more information on the Ansible community's practices regarding responsible disclosure, see https://www.ansible.com/security From 13db49aab7d1e43eb7e5b427b7cd8143289e6928 Mon Sep 17 00:00:00 2001 From: "Keith J. Grant" Date: Wed, 16 Mar 2022 13:25:51 -0700 Subject: [PATCH 121/125] display current page name in document title --- awx/ui/src/App.js | 13 ++----------- .../components/ScreenHeader/ScreenHeader.js | 13 ++++++++++++- awx/ui/src/hooks/useTitle.js | 19 +++++++++++++++++++ .../screens/ActivityStream/ActivityStream.js | 2 ++ awx/ui/src/setupTests.js | 1 + 5 files changed, 36 insertions(+), 12 deletions(-) create mode 100644 awx/ui/src/hooks/useTitle.js diff --git a/awx/ui/src/App.js b/awx/ui/src/App.js index 674dec8b07..bf1701eec9 100644 --- a/awx/ui/src/App.js +++ b/awx/ui/src/App.js @@ -27,7 +27,7 @@ import { isAuthenticated } from 'util/auth'; import { getLanguageWithoutRegionCode } from 'util/language'; import Metrics from 'screens/Metrics'; import SubscriptionEdit from 'screens/Setting/Subscription/SubscriptionEdit'; -import { RootAPI } from 'api'; +import useTitle from 'hooks/useTitle'; import { dynamicActivate, locales } from './i18nLoader'; import getRouteConfig from './routeConfig'; import { SESSION_REDIRECT_URL } from './constants'; @@ -150,16 +150,7 @@ function App() { dynamicActivate(language); }, [language]); - useEffect(() => { - async function fetchBrandName() { - const { - data: { BRAND_NAME }, - } = await RootAPI.readAssetVariables(); - - document.title = BRAND_NAME; - } - fetchBrandName(); - 
}, []); + useTitle(); const redirectURL = window.sessionStorage.getItem(SESSION_REDIRECT_URL); if (redirectURL) { diff --git a/awx/ui/src/components/ScreenHeader/ScreenHeader.js b/awx/ui/src/components/ScreenHeader/ScreenHeader.js index 670f8a4f51..eeb2b5f990 100644 --- a/awx/ui/src/components/ScreenHeader/ScreenHeader.js +++ b/awx/ui/src/components/ScreenHeader/ScreenHeader.js @@ -1,5 +1,6 @@ import React from 'react'; import PropTypes from 'prop-types'; +import useTitle from 'hooks/useTitle'; import { t } from '@lingui/macro'; import { @@ -12,7 +13,7 @@ import { Tooltip, } from '@patternfly/react-core'; import { HistoryIcon } from '@patternfly/react-icons'; -import { Link, Route, useRouteMatch } from 'react-router-dom'; +import { Link, Route, useRouteMatch, useLocation } from 'react-router-dom'; const ScreenHeader = ({ breadcrumbConfig, streamType }) => { const { light } = PageSectionVariants; @@ -20,6 +21,16 @@ const ScreenHeader = ({ breadcrumbConfig, streamType }) => { path: Object.keys(breadcrumbConfig)[0], strict: true, }); + + const location = useLocation(); + const parts = location.pathname.split('/'); + if (parts.length > 2) { + parts.pop(); + } + + const pathTitle = breadcrumbConfig[parts.join('/')]; + useTitle(pathTitle); + const isOnlyOneCrumb = oneCrumbMatch && oneCrumbMatch.isExact; return ( diff --git a/awx/ui/src/hooks/useTitle.js b/awx/ui/src/hooks/useTitle.js new file mode 100644 index 0000000000..3f960ccb21 --- /dev/null +++ b/awx/ui/src/hooks/useTitle.js @@ -0,0 +1,19 @@ +import { useEffect } from 'react'; +import useBrandName from './useBrandName'; + +export default function useTitle(title) { + const brandName = useBrandName(); + + useEffect(() => { + const prevTitle = document.title; + if (title) { + document.title = `${brandName} | ${title}`; + } else { + document.title = brandName; + } + + return () => { + document.title = prevTitle; + }; + }, [title, brandName]); +} diff --git a/awx/ui/src/screens/ActivityStream/ActivityStream.js 
b/awx/ui/src/screens/ActivityStream/ActivityStream.js index c4e196ae42..505608120e 100644 --- a/awx/ui/src/screens/ActivityStream/ActivityStream.js +++ b/awx/ui/src/screens/ActivityStream/ActivityStream.js @@ -20,6 +20,7 @@ import PaginatedTable, { getSearchableKeys, } from 'components/PaginatedTable'; import useRequest from 'hooks/useRequest'; +import useTitle from 'hooks/useTitle'; import { getQSConfig, parseQueryString, updateQueryString } from 'util/qs'; import { ActivityStreamAPI } from 'api'; @@ -31,6 +32,7 @@ function ActivityStream() { const [isTypeDropdownOpen, setIsTypeDropdownOpen] = useState(false); const location = useLocation(); const history = useHistory(); + useTitle(t`Activity Stream`); const urlParams = new URLSearchParams(location.search); const activityStreamType = urlParams.get('type') || 'all'; diff --git a/awx/ui/src/setupTests.js b/awx/ui/src/setupTests.js index b9d84f3803..9625b30276 100644 --- a/awx/ui/src/setupTests.js +++ b/awx/ui/src/setupTests.js @@ -61,6 +61,7 @@ jest.mock('axios', () => ({ }, }), })); +jest.mock('hooks/useTitle'); afterEach(() => { if (networkRequestUrl) { From c912dd4e760926c984f8ef4b2674275af14245bc Mon Sep 17 00:00:00 2001 From: nixocio Date: Thu, 17 Mar 2022 16:35:14 -0400 Subject: [PATCH 122/125] Fix credential encrypted Display credential as encrypted. 
See: https://github.com/ansible/awx/issues/11915 --- awx/ui/src/screens/Template/Survey/SurveyReorderModal.js | 7 +++++++ .../src/screens/Template/Survey/SurveyReorderModal.test.js | 5 ++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js b/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js index e5bf1980c4..f3c84a6371 100644 --- a/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js +++ b/awx/ui/src/screens/Template/Survey/SurveyReorderModal.js @@ -116,6 +116,13 @@ function SurveyReorderModal({ const defaultAnswer = (q) => { let component = null; switch (q.type) { + case 'password': + component = ( + + {t`encrypted`.toUpperCase()} + + ); + break; case 'textarea': component = (
handleSelect(subscription), + onSelect: () => setSelected([subscription]), isSelected: selected.some( - (row) => row.pool_id === subscription.pool_id + (row) => row.id === subscription.id ), variant: 'radio', - rowIndex: `row-${subscription.pool_id}`, + rowIndex: `row-${subscription.id}`, }} /> {subscription.subscription_name}