Difference between revisions of "Install AWX on K3S"
Jump to navigation
Jump to search
(Created page with "Since version 18, AWX, the comunity edition of ansible tower gets deployed by a kubernetes operator. This makes it easier to install and maintain the installation, but not all...") |
|||
| (25 intermediate revisions by the same user not shown) | |||
| Line 1: | Line 1: | ||
| − | Since version 18, AWX, the comunity edition of ansible tower gets deployed by a kubernetes operator. | + | Since version 18, AWX, the comunity edition of ansible tower gets deployed by a kubernetes operator.<br> |
| − | This makes it easier to install and maintain the installation, but not all of us are familiar with kubernetes and operators. | + | This makes it easier to install and maintain the installation, but not all of us are familiar with kubernetes and operators.<br> |
| − | So I share a short step by step guide on how to setup ansible awx in a "semi professional" way on a single k3s kubernetes node. | + | So I share a short step by step guide on how to setup ansible awx in a "semi professional" way on a single k3s kubernetes node.<br> |
=VM Setup= | =VM Setup= | ||
| Line 14: | Line 14: | ||
==Prepare OS== | ==Prepare OS== | ||
dnf -y upgrade | dnf -y upgrade | ||
| − | dnf -y install setroubleshoot-server curl lsof wget | + | dnf -y install setroubleshoot-server curl lsof wget make |
sed -i '/swap/d' /etc/fstab | sed -i '/swap/d' /etc/fstab | ||
| Line 23: | Line 23: | ||
firewall-cmd --reload | firewall-cmd --reload | ||
reboot | reboot | ||
| + | |||
| + | |||
=Setup K3S= | =Setup K3S= | ||
| − | + | curl -sfL https://get.k3s.io | sh | |
| − | curl -sfL https://get.k3s.io | sh | ||
cat /etc/systemd/system/k3s.service | cat /etc/systemd/system/k3s.service | ||
| Line 36: | Line 37: | ||
# all pods in running state? fine! | # all pods in running state? fine! | ||
kubectl get pods --all-namespaces | kubectl get pods --all-namespaces | ||
| + | |||
| + | # install kustomize | ||
| + | cd /usr/local/sbin/ | ||
| + | curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" | bash | ||
=Deploy AWX= | =Deploy AWX= | ||
| − | = | + | <pre> |
| − | + | export NAMESPACE=awx | |
| − | + | kubectl create namespace $NAMESPACE | |
| + | kubectl config set-context --namespace=$NAMESPACE --current | ||
| − | + | cd ; mkdir awx ; cd awx | |
| − | + | vim kustomization.yaml | |
| + | ------------ | ||
| + | --- | ||
| + | apiVersion: kustomize.config.k8s.io/v1beta1 | ||
| + | kind: Kustomization | ||
| + | resources: | ||
| + | # Find the latest tag here: https://github.com/ansible/awx-operator/releases | ||
| + | - github.com/ansible/awx-operator/config/default?ref=0.20.0 | ||
| + | - awx-bitbull.yml | ||
| − | + | # Set the image tags to match the git version from above | |
| − | + | images: | |
| − | + | - name: quay.io/ansible/awx-operator | |
| − | + | newTag: 0.20.0 | |
| − | |||
| − | + | # Specify a custom namespace in which to install AWX | |
| − | + | namespace: awx | |
| − | + | ------------ | |
| − | |||
| − | + | vim awx-bitbull.yml | |
| − | + | ------------ | |
--- | --- | ||
apiVersion: awx.ansible.com/v1beta1 | apiVersion: awx.ansible.com/v1beta1 | ||
kind: AWX | kind: AWX | ||
metadata: | metadata: | ||
| − | name: | + | name: bitbull |
spec: | spec: | ||
| − | + | ingress_type: route | |
| − | + | route_host: ansible.apps.bitbull.ch | |
| − | + | route_tls_termination_mechanism: Edge | |
| − | + | ------------ | |
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | + | kustomize build . | kubectl apply -f - | |
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
| − | |||
</pre> | </pre> | ||
| − | + | [[Category:Ansible]] | |
| − | + | [[Category:K3S]] | |
| + | [[Category:OpenShift & K8S]] | ||
=Fetch the secret and test the login= | =Fetch the secret and test the login= | ||
| − | kubectl get secret | + | kubectl get secret bitbull-admin-password -o jsonpath='{.data.password}' | base64 --decode |
| − | firefox https://fqdn | + | firefox https://fqdn.domain.com |
* user: admin | * user: admin | ||
| Line 154: | Line 123: | ||
kubectl exec --stdin --tty <pod-name> -c <container-name> -- /bin/bash | kubectl exec --stdin --tty <pod-name> -c <container-name> -- /bin/bash | ||
| + | |||
| + | =Notes= | ||
| + | ==Backup== | ||
| + | |||
| + | vim awx-backup.yml | ||
| + | <pre> | ||
| + | --- | ||
| + | apiVersion: awx.ansible.com/v1beta1 | ||
| + | kind: AWXBackup | ||
| + | metadata: | ||
| + | name: awxbackup-20220311 | ||
| + | namespace: awx | ||
| + | spec: | ||
| + | deployment_name: awx | ||
| + | ... | ||
| + | </pre> | ||
| + | |||
| + | oc apply -f awx-backup.yml | ||
| + | |||
| + | oc get awxbackups awxbackup-20220311 -o yaml | ||
| + | |||
| + | <pre> | ||
| + | apiVersion: awx.ansible.com/v1beta1 | ||
| + | kind: AWXBackup | ||
| + | ... | ||
| + | name: awxbackup-20220311 | ||
| + | ... | ||
| + | status: | ||
| + | backupClaim: awx-backup-claim | ||
| + | backupDirectory: /backups/tower-openshift-backup-2022-03-11-05:45:56 | ||
| + | conditions: | ||
| + | - lastTransitionTime: "2022-03-11T05:45:07Z" | ||
| + | reason: Successful | ||
| + | status: "True" | ||
| + | type: Running | ||
| + | </pre> | ||
| + | |||
| + | Keep that as well for DR reason | ||
| + | oc get awxbackups awxbackup-20220311 -o yaml > awxbackup-20220311.yml | ||
| + | |||
| + | <pre> | ||
| + | ll /srv/nfs/pv05/tower-openshift-backup-2022-03-11-05\:45\:56/ | ||
| + | total 13072 | ||
| + | -rw-r--r--. 1 1000680000 root 600 Mar 11 06:46 awx_object | ||
| + | -rw-r--r--. 1 1000680000 root 670 Mar 11 06:46 secrets.yml | ||
| + | -rw-------. 1 1000680000 root 13377441 Mar 11 06:46 tower.db | ||
| + | </pre> | ||
| + | |||
| + | ==Restore== | ||
| + | vim awx-restore.yml | ||
| + | <pre> | ||
| + | --- | ||
| + | apiVersion: awx.ansible.com/v1beta1 | ||
| + | kind: AWXRestore | ||
| + | metadata: | ||
| + | name: awxrestore-20230221 | ||
| + | namespace: awx | ||
| + | spec: | ||
| + | deployment_name: bitbull | ||
| + | backup_pvc_namespace: awx | ||
| + | backup_dir: /backups/tower-openshift-backup-2023-02-20-17:04:58 | ||
| + | backup_pvc: awx-backup-claim | ||
| + | ... | ||
| + | </pre> | ||
| + | |||
| + | oc apply -f awx-restore.yml | ||
| + | |||
| + | oc get awxrestores -o yaml | ||
| + | |||
| + | [[Category:Ansible]] | ||
| + | [[Category:K3S]] | ||
| + | [[Category:OpenShift & K8S]] | ||
| + | |||
| + | ==okd 4.10 instance template== | ||
| + | <pre> | ||
| + | apiVersion: awx.ansible.com/v1beta1 | ||
| + | kind: AWX | ||
| + | metadata: | ||
| + | name: bitbull | ||
| + | spec: | ||
| + | ingress_type: route | ||
| + | route_host: awx.domain.com | ||
| + | route_tls_termination_mechanism: edge | ||
| + | </pre> | ||
| + | |||
| + | |||
| + | |||
| + | |||
| + | |||
| + | ==AWX CLI== | ||
| + | * https://github.com/ansible/awx/blob/devel/INSTALL.md#installing-the-awx-cli | ||
| + | <pre> | ||
| + | pip3 install awxkit | ||
| + | export TOWER_HOST=https://awx.domain.com TOWER_USERNAME=admin TOWER_PASSWORD=xxx | ||
| + | awx login admin | ||
| + | awx export > export.json | ||
| + | </pre> | ||
| + | |||
| + | [[Category:Ansible]] | ||
| + | [[Category:K3S]] | ||
| + | [[Category:OpenShift & K8S]] | ||
Latest revision as of 13:00, 21 February 2023
Since version 18, AWX, the community edition of Ansible Tower, has been deployed by a Kubernetes operator.
This makes the installation easier to set up and maintain, but not all of us are familiar with Kubernetes and operators.
So I am sharing a short step-by-step guide on how to set up Ansible AWX in a "semi-professional" way on a single K3s Kubernetes node.
Contents
1 VM Setup
1.1 VM requirements
Just set up a minimal CentOS 8 VM with the following requirements:
- OS: centos8 minimal
- CPU: 2
- MEM: 8GB (6 GB may work as well)
- DISK: 40G (7GB used on a fresh setup)
1.2 Prepare OS
dnf -y upgrade dnf -y install setroubleshoot-server curl lsof wget make
sed -i '/swap/d' /etc/fstab swapoff -a
firewall-cmd --permanent --zone=public --add-service=https firewall-cmd --zone=public --add-masquerade --permanent firewall-cmd --reload reboot
2 Setup K3S
curl -sfL https://get.k3s.io | sh
cat /etc/systemd/system/k3s.service systemctl status k3s
kubectl get nodes # all pods in running state? fine! kubectl get pods --all-namespaces
# install kustomize cd /usr/local/sbin/ curl -s "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" | bash
3 Deploy AWX
export NAMESPACE=awx
kubectl create namespace $NAMESPACE
kubectl config set-context --namespace=$NAMESPACE --current
cd ; mkdir awx ; cd awx
vim kustomization.yaml
------------
---
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
# Find the latest tag here: https://github.com/ansible/awx-operator/releases
- github.com/ansible/awx-operator/config/default?ref=0.20.0
- awx-bitbull.yml
# Set the image tags to match the git version from above
images:
- name: quay.io/ansible/awx-operator
newTag: 0.20.0
# Specify a custom namespace in which to install AWX
namespace: awx
------------
vim awx-bitbull.yml
------------
---
apiVersion: awx.ansible.com/v1beta1
kind: AWX
metadata:
name: bitbull
spec:
ingress_type: route
route_host: ansible.apps.bitbull.ch
route_tls_termination_mechanism: Edge
------------
kustomize build . | kubectl apply -f -
4 Fetch the secret and test the login
kubectl get secret bitbull-admin-password -o jsonpath='{.data.password}' | base64 --decode
firefox https://fqdn.domain.com
- user: admin
5 Links
- https://rancher.com/docs/k3s/latest/en/quick-start/
- https://rancher.com/docs/k3s/latest/en/backup-restore/
- https://github.com/ansible/awx-operator
6 Debug Notes
6.1 Open Node Port for direct access
PORT=$(kubectl describe svc awx-service | grep NodePort: | awk '{print $3}' | tr 'A-Z' 'a-z')
echo PORT=$PORT
firewall-cmd --zone=public --add-port=$PORT
6.2 Disable SELinux
setenforce 0
> /var/log/audit/audit.log   # truncate the audit log
# do some bad things
sealert -a /var/log/audit/audit.log
6.3 Traefik Config
- https://levelup.gitconnected.com/a-guide-to-k3s-ingress-using-traefik-with-nodeport-6eb29add0b4b
kubectl -n kube-system edit cm traefik
6.4 Jump into container for debugging
# get pods kubectl get pods # get containers inside of pods kubectl describe <pod-name>
kubectl exec --stdin --tty <pod-name> -c <container-name> -- /bin/bash
7 Notes
7.1 Backup
vim awx-backup.yml
--- apiVersion: awx.ansible.com/v1beta1 kind: AWXBackup metadata: name: awxbackup-20220311 namespace: awx spec: deployment_name: awx ...
oc apply -f awx-backup.yml
oc get awxbackups awxbackup-20220311 -o yaml
apiVersion: awx.ansible.com/v1beta1
kind: AWXBackup
...
name: awxbackup-20220311
...
status:
backupClaim: awx-backup-claim
backupDirectory: /backups/tower-openshift-backup-2022-03-11-05:45:56
conditions:
- lastTransitionTime: "2022-03-11T05:45:07Z"
reason: Successful
status: "True"
type: Running
Keep this output as well, for disaster recovery (DR) purposes:
oc get awxbackups awxbackup-20220311 -o yaml > awxbackup-20220311.yml
ll /srv/nfs/pv05/tower-openshift-backup-2022-03-11-05\:45\:56/ total 13072 -rw-r--r--. 1 1000680000 root 600 Mar 11 06:46 awx_object -rw-r--r--. 1 1000680000 root 670 Mar 11 06:46 secrets.yml -rw-------. 1 1000680000 root 13377441 Mar 11 06:46 tower.db
7.2 Restore
vim awx-restore.yml
--- apiVersion: awx.ansible.com/v1beta1 kind: AWXRestore metadata: name: awxrestore-20230221 namespace: awx spec: deployment_name: bitbull backup_pvc_namespace: awx backup_dir: /backups/tower-openshift-backup-2023-02-20-17:04:58 backup_pvc: awx-backup-claim ...
oc apply -f awx-restore.yml
oc get awxrestores -o yaml
7.3 okd 4.10 instance template
apiVersion: awx.ansible.com/v1beta1 kind: AWX metadata: name: bitbull spec: ingress_type: route route_host: awx.domain.com route_tls_termination_mechanism: edge
7.4 AWX CLI
pip3 install awxkit export TOWER_HOST=https://awx.domain.com TOWER_USERNAME=admin TOWER_PASSWORD=xxx awx login admin awx export > export.json