Rundeck (Job scheduler and Runbook automation)

http://rundeck.org/3.0.x-SNAPSHOT/administration/install/ubuntudebian.html

ansible-playbook rundeck-ansible.example.com.yml -i your_inventory

<?php
// embed the playbook source from GitHub into this page
$URL = "https://raw.githubusercontent.com/panticz/ansible/master/rundeck-ansible.example.com.yml";
echo "<pre>";
$c = curl_init();
curl_setopt($c, CURLOPT_URL, $URL);
curl_setopt($c, CURLOPT_RETURNTRANSFER, 1);
echo htmlspecialchars(curl_exec($c));
curl_close($c);
echo "</pre>";
?>

Ansible role
https://github.com/panticz/ansible/tree/master/roles/rundeck
https://github.com/panticz/ansible/blob/master/rundeck.example.com.yml

# install
wget http://dl.bintray.com/rundeck/rundeck-deb/rundeck-2.7.1-1-GA.deb -P /tmp
sudo dpkg -i /tmp/rundeck-2.7.1-1-GA.deb
sudo /etc/init.d/rundeckd start

# Rundeck CLI
sudo apt-get install rundeck-cli
https://rundeck.github.io/rundeck-cli/
https://github.com/rundeck/rundeck-cli/issues/62
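A minimal sketch for pointing the rd CLI at the server via environment variables (URL and token below are placeholders; generate a real token in the user profile):
export RD_URL=http://localhost:4440
export RD_TOKEN=ABCDEFGHIJKLMNOPRST1234567890
rd system info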

# Admin
http://localhost:4440/user/login
admin/admin

Configuration (global)
# framework properties
/etc/rundeck/framework.properties

# job database
/var/lib/rundeck/data/rundeckdb.mv.db

# hosts
/var/rundeck/projects/PROJECT_NAME/etc/resources.xml

# add user
echo "foo:bar,user,devops" >> /etc/rundeck/realm.properties

# acl
/var/rundeck/projects/PROJECT_NAME/acls/NAME.aclpolicy
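A minimal .aclpolicy sketch granting a group full access (group name and scope are assumptions; system-level ACL files use the same YAML format in /etc/rundeck/):
description: full access for the devops group
context:
  project: '.*'
for:
  resource:
    - allow: '*'
  adhoc:
    - allow: '*'
  job:
    - allow: '*'
  node:
    - allow: '*'
by:
  group: devops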

Configuration (project)
# configure nodes
/var/rundeck/projects/PROJECT_NAME/etc/resources.xml
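A minimal resources.xml sketch with one node (hostname, username and tags are placeholders):
<?xml version="1.0" encoding="UTF-8"?>
<project>
  <node name="web01" hostname="web01.example.com" username="rundeck" osFamily="unix" tags="web"/>
</project>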

# use native ssh agent to access host behind proxy / bastion
/var/rundeck/projects/PROJECT_NAME/etc/project.properties
plugin.script-exec.default.command=/usr/bin/ssh ${node.username}@${node.hostname} ${exec.command}
plugin.script-copy.default.command=/usr/bin/scp ${file-copy.file} ${node.username}@${node.hostname}\:${file-copy.destination}
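# alternative sketch: jump over a bastion with ssh ProxyJump instead (bastion hostname is a placeholder)
plugin.script-exec.default.command=/usr/bin/ssh -o ProxyJump=bastion.example.com ${node.username}@${node.hostname} ${exec.command}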

Email notification
http://rundeck.org/docs/administration/email-settings.html

# /etc/rundeck/rundeck-config.properties
grails.mail.host=smtp.example.com
grails.mail.port=25
grails.mail.username=foo
grails.mail.password=bar

# restart service
service rundeckd restart

# ssh
mkdir /var/lib/rundeck/.ssh
chown rundeck:rundeck /var/lib/rundeck/.ssh
chmod 700 /var/lib/rundeck/.ssh
touch /var/lib/rundeck/.ssh/id_rsa
chown rundeck:rundeck /var/lib/rundeck/.ssh/id_rsa
chmod 600 /var/lib/rundeck/.ssh/id_rsa
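Instead of populating an empty id_rsa by hand, the key pair can also be generated directly as the rundeck user and distributed to the managed nodes (target host is a placeholder):
sudo -u rundeck ssh-keygen -t rsa -b 4096 -N "" -f /var/lib/rundeck/.ssh/id_rsa
sudo -u rundeck ssh-copy-id -i /var/lib/rundeck/.ssh/id_rsa.pub root@node1.example.com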

# log
tail -f /var/log/rundeck/*.log

# Documentation
http://rundeck.org/
http://rundeck.org/docs/manual/getting-started.html
http://rundeck.org/2.3.2/administration/configuring-ssl.html

Plugins
https://www.rundeck.com/integrations/plugins
http://rundeck.org/plugins/ansible/2016/03/11/ansible-plugin.html
http://rundeck.org/plugins/2013/01/01/jenkins-rundeck.html
http://rundeck.org/plugins/2013/01/01/aws-ec2-nodes.html

# scm
https://docs.rundeck.com/docs/developer/scm-plugins.html
/var/rundeck/projects/PROJECT_NAME/scm

Docker
DockerHub images: https://hub.docker.com/r/rundeck/rundeck/

# Create container with shared SSH keys and forward the GUI to localhost
sudo docker run --name rundeck -p 4440:4440 -v /home/${USER}/.ssh:/home/rundeck/.ssh rundeck/rundeck:SNAPSHOT

# Credentials
url: http://127.0.0.1:4440/
user: admin
pass: admin
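To keep jobs and execution history across container restarts, the server data directory can additionally be mounted as a named volume; a sketch assuming the /home/rundeck/server/data layout of the official image:
sudo docker run --name rundeck -p 4440:4440 \
  -v /home/${USER}/.ssh:/home/rundeck/.ssh \
  -v rundeck-data:/home/rundeck/server/data \
  rundeck/rundeck:SNAPSHOT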

https://github.com/x110dc/docker-rundeck

Change default admin password
https://docs.rundeck.com/docs/administration/security/authenticating-users.html
#RD_PASS=$(openssl rand -base64 16)
#echo ${RD_PASS}
#RD_PASS_MD5=$(java -cp /var/lib/rundeck/bootstrap/jetty-all-9.0.7.v20131107.jar org.eclipse.jetty.util.security.Password admin ${RD_PASS} 2>&1 | grep MD5)
#sed -i "s/^admin:admin/admin:MD5:${RD_PASS_MD5}/g" /etc/rundeck/realm.properties
java -jar /var/lib/rundeck/bootstrap/rundeck-*.war --encryptpwd Jetty
service rundeckd restart

# echo "framework.server.password = MD5:${RD_PASS_MD5}" >> /etc/rundeck/framework.properties

# Notify icinga
Local Command:
ssh monitoring.example.com '/usr/bin/printf "[%lu] SCHEDULE_FORCED_SVC_CHECK;%s;%s;%s\n" $(date +%s) ${node.name} APT $(date +%s) | tee -a /var/lib/icinga/rw/icinga.cmd'

# User authentication
http://rundeck.org/docs/administration/authenticating-users.html

Changelog
http://rundeck.org/docs/history/changelog.html

Rundeck jobs and scripts repository
https://github.com/panticz/rundeck

Job options
http://rundeck.org/2.0.0/manual/job-options.html
# use in bash
NODES=$(echo $RD_OPTION_NODES | sed 's/,/ /g')
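The resulting space-separated list can then be looped over in the job script, e.g.:
for NODE in ${NODES}; do
    ssh ${NODE} uptime
done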

# remote URL
https://docs.gitlab.com/ee/api/repository_files.html#get-file-from-repository
file:///tmp/foo.json
# deprecated: https://gitlab.example.com/foo/bar/raw/master/file.json?private_token=foo1234

https://gitlab.example.com/api/v4/projects/3/repository/files/${job.project}%2F${globals.environment}%2Foptions-hosts.json/raw?ref=master&private_token=${globals.private_token}
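The remote URL has to return JSON; both a plain list of values and a list of name/value pairs are accepted, so an options-hosts.json could look like this (hostnames are placeholders):
["host1.example.com", "host2.example.com"]

[{"name": "Host 1", "value": "host1.example.com"}, {"name": "Host 2", "value": "host2.example.com"}]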

Pipe command
echo ${option.RSA} | tee /tmp/debug.txt

Commands
# check file size
'[[ $(find /media/backup/db.mr/ -size +50M -name exp_$(date -I)_*.dmp.bz2 ) ]]'

API call with parameter from curl
http://rundeck.org/docs/api/index.html
http://rundeck.org/docs/api/#running-a-job

curl -H "X-Rundeck-Auth-Token: ABCDEFGHIJKLMNOPRST1234567890" --data-urlencode "argString=-sku 'item1 item2 item3'" -X POST http://rundeck.example.com/api/25/job/26356713-8285-479e-860f-221559a64c23422/run

# fix "Failed loading remote option values / Exception: java.lang.Exception: Unexpected content type received: text/plain; charset=utf-8" in Rundeck Allowed Values > Remote URL, see also:
# https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/23442
echo "framework.globals.environment=dev" >> /etc/rundeck/framework.properties

Output
# color
grep --color=always foo /path/to/file

CLI commands (draft)
rd projects create -p ops
rd projects configure set -p ops --file ~/project.properties
rd keys create --type privateKey --path keys/lib/rundeck/.ssh/github_rsa --file ~/github_rsa
rd projects scm setup -p ops --integration import --type git-import --file ~/scm-import.json
rd projects scm perform -p ops -i import --action initialize-tracking -f useFilePattern="true" filePattern=".*\\.yaml"
rd projects scm perform -i import -p ops --action import-all
rd projects scm perform -i import -p ops --action import-all --item "ops/my-job-file.yaml"
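A rough sketch of the scm-import.json referenced above; whether the top-level "config" wrapper is required depends on the rd/API version, and the key names mirror the git-import plugin settings from the GUI, so treat the exact shape as an assumption (repository URL is a placeholder, key path and file pattern are taken from the commands above):
{
  "config": {
    "url": "git@gitlab.example.com:devops/rundeck.git",
    "branch": "master",
    "dir": "/var/rundeck/projects/ops/scm",
    "pathTemplate": "${job.group}${job.name}-${job.id}.${config.format}",
    "format": "yaml",
    "sshPrivateKeyPath": "keys/lib/rundeck/.ssh/github_rsa",
    "strictHostKeyChecking": "no",
    "useFilePattern": "true",
    "filePattern": ".*\\.yaml"
  }
}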

Fix "Import Status: Not Tracked" issue
apt install -y git
cd /var/rundeck/projects/DevOps/scm
git pull

Job options
https://rundeck.org/docs/man5/resource-json.html
https://rundeck.org/docs/manual/defining-job-options.html
https://rundeck.org/2.8.2/administration/configuration-file-reference.html
https://rundeck.org/2.9.2/administration/gui-customization.html

Ansible snippets

# login as user ubuntu, use python3 and relogin as root
- hosts:
    - vm1
    - vm2
  become: yes
  vars:
    ansible_python_interpreter: /usr/bin/python3
    ansible_ssh_user: ubuntu
  tasks:
    - include: "{{ inventory_hostname }}.yml"

# show user and host
- debug:
    msg: "{{ ansible_user_id }}@{{ inventory_hostname }}"

# show host groups
- debug:
    msg: "{{ group_names }}"

- debug:
    var: hostvars[inventory_hostname]

- debug:
    msg: "{{ ansible_system_vendor }}"

- debug:
    msg: "{{ ansible_default_ipv4['address'] }}"

- debug:
    msg: "{{ vms | length }}"

Sonoff Basic / ITEAD ESP8266

Flash ESPEasy with an FTDI adapter
sudo apt-get install -y unzip wget python-minimal python-serial
wget -q https://codeload.github.com/espressif/esptool/zip/master -qO /tmp/espressif.zip
unzip /tmp/espressif.zip -d /tmp

wget http://www.letscontrolit.com/downloads/ESPEasy_R147_RC8.zip -qO /tmp/ESPEasy_R147_RC8.zip
unzip /tmp/ESPEasy_R147_RC8.zip -d /tmp
/tmp/esptool-master/esptool.py --port /dev/ttyUSB0 write_flash --flash_mode dio --flash_size 1MB 0x0 /tmp/ESPEasy_R147_1024.bin

# Connect to temporary WiFi access point
SSID: ESP_0
pass: configesp

ZFS filesystem on Linux

Create ZFS filesystem
apt install -y zfsutils-linux
zpool create tank /dev/system/lxd
zfs create -o mountpoint=/var/lib/lxd2 tank/lxd
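Verify the pool and dataset afterwards:
zpool status tank
zfs list -r tank
zfs get mountpoint tank/lxd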

boot Ubuntu 16.04 LiveCD
terminal
sudo apt-get install -y ssh
sudo passwd ubuntu
ip a

Mount all datasets
zfs mount -a

SSH login to Ubuntu LiveCD
ssh ubuntu@

sudo su

sudo apt-add-repository universe
sudo apt update

apt install -y debootstrap zfs-initramfs

# remove previous ZFS pool
zpool export rpool

DEVICES="

webix

datatable
# reload from external source
datatable1.clearAll();
datatable1.load(datatable1.config.url);

Webix Remote
http://docs.webix.com/desktop__webix_remote_php.html - Webix Remote with PHP

# pass parameters to remote function
var result = webix.remote.function1(foo, bar);

# show return value from remote function as webix message
var result = webix.remote.MyClass.select(val1);
result.then((data) => webix.message("msg:" + data));

send data
# post
webix.ajax().post("post.php", {foo:bar});

Webix Jet

DRBD

# cat /etc/drbd.d/global_common.conf 
global {
		usage-count	yes;
}
 
common {
	startup {
		degr-wfc-timeout	0;
	}
 
	net {
		cram-hmac-alg	sha1;
		shared-secret	****************;
	}
 
	disk {
		on-io-error	detach;
	}
}
 
# cat /etc/drbd.d/r0.res 
resource r0 {
	on scld.sedo.de.intern {
		volume 0 {
			device		/dev/drbd0;
			disk		/dev/vg0/lvol0;
			flexible-meta-disk	internal;
		}
		address		192.168.255.1:7788;
	}
	on ubuntu {
		volume 0 {
			device		/dev/drbd0;
			disk		/dev/sda3;
			flexible-meta-disk	internal;
		}
		address		192.168.255.2:7788;
	}
}
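Once the resource file is in place on both nodes, the volume is typically initialized roughly like this (run create-md/up on both nodes, primary --force only on the first one for the initial sync):
drbdadm create-md r0
drbdadm up r0
drbdadm primary --force r0
cat /proc/drbd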

dnsmasq

# cat /etc/dnsmasq.conf
dhcp-authoritative
server=192.168.1.6

log-facility=/var/log/dnsmasq.log
log-queries

local=/example.com/
domain=example.com

# cat /etc/dnsmasq.conf | grep "^dhcp-host" | awk -v OFS="\t" -F "," '{print $3, $2}' | sort -k2 > /etc/hosts.pre
addn-hosts=/etc/hosts.pre
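# example static lease that feeds the hosts.pre generation above (MAC, name and IP are placeholders)
dhcp-host=00:11:22:33:44:55,host1,192.168.1.50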

# DHCP
dhcp-range=192.168.1.150,192.168.1.200,255.255.255.0,1d
dhcp-option=option:router,192.168.1.6
dhcp-option=option:ntp-server,217.7.239.199

# PXE
dhcp-boot=undionly.kpxe,srv,192.168.1.9
dhcp-boot=net:sip,http://srv/snom3x0/snom3x0.xml,srv,192.168.1.9

gogs

apt -y install docker-compose

cat << EOF > docker-compose.yml
version: "2"

networks:
  gitea:
    external: false

services:
  web:
    image: gitea/gitea:latest
    environment:
      - USER_UID=1000
      - USER_GID=1000
      - DB_TYPE=mysql
      - DB_HOST=db:3306
      - DB_NAME=gitea
      - DB_USER=gitea
      - DB_PASSWD=gitea
    restart: always
    networks:
      - gitea
    volumes:
      - ./gitea:/data
    ports:
      - "80:3000"
      - "222:22"
    depends_on:
      - db
  db:
    image: mysql:latest
    restart: always
    environment: