Merge pull request #3108 from acmesh-official/ga

support Github Actions
neil 2020-08-17 22:59:46 +08:00 committed by GitHub
commit b6508cccec
29 changed files with 1061 additions and 1042 deletions

.github/workflows/ci.yml (new file)

@@ -0,0 +1,16 @@
+name: CI
+on: [push, pull_request]
+jobs:
+  formatCheck:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Install Shellcheck
+        run: sudo apt-get install -y shellcheck
+      - name: DoShellcheck
+        run: shellcheck -V && shellcheck -e SC2181 **/*.sh && echo "shellcheck OK"
+      - name: Install shfmt
+        run: curl -sSL https://github.com/mvdan/sh/releases/download/v3.1.2/shfmt_v3.1.2_linux_amd64 -o ~/shfmt && chmod +x ~/shfmt
+      - name: shfmt
+        run: ~/shfmt -l -w -i 2 . ; git diff --exit-code && echo "shfmt OK"
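Contributors can reproduce the same two checks locally before pushing. A minimal sketch, assuming bash, shellcheck on PATH, and shfmt v3.1.2 downloaded to ~/shfmt exactly as in the workflow step above:

  shopt -s globstar                        # let **/*.sh match scripts in subdirectories
  shellcheck -e SC2181 **/*.sh && echo "shellcheck OK"
  ~/shfmt -l -w -i 2 .                     # rewrite all scripts in place with 2-space indent
  git diff --exit-code && echo "shfmt OK"  # fails if shfmt changed anything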

acme.sh

@@ -1077,11 +1077,11 @@ _isEccKey() {
     return 1
   fi
-  [ "$_length" != "1024" ] \
-    && [ "$_length" != "2048" ] \
-    && [ "$_length" != "3072" ] \
-    && [ "$_length" != "4096" ] \
-    && [ "$_length" != "8192" ]
+  [ "$_length" != "1024" ] &&
+    [ "$_length" != "2048" ] &&
+    [ "$_length" != "3072" ] &&
+    [ "$_length" != "4096" ] &&
+    [ "$_length" != "8192" ]
 }
 # _createkey 2048|ec-256 file
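Most of the remaining hunks are the same mechanical rewrite produced by the new shfmt step: continuation lines that begin with &&, || or | are folded into trailing operators, which is shfmt's default binary-operator placement. A rough way to reproduce one such rewrite locally (the temporary file name is purely illustrative; assumes shfmt sits at ~/shfmt as installed by the workflow):

  printf 'true \\\n  && false\n' >/tmp/demo.sh
  ~/shfmt -i 2 /tmp/demo.sh   # should print "true &&" followed by an indented "false"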


@@ -69,8 +69,8 @@ exim4_deploy() {
   cp "$_exim4_conf" "$_backup_conf"
   _info "Modify exim4 conf: $_exim4_conf"
-  if _setopt "$_exim4_conf" "tls_certificate" "=" "$_real_fullchain" \
-    && _setopt "$_exim4_conf" "tls_privatekey" "=" "$_real_key"; then
+  if _setopt "$_exim4_conf" "tls_certificate" "=" "$_real_fullchain" &&
+    _setopt "$_exim4_conf" "tls_privatekey" "=" "$_real_key"; then
     _info "Set config success!"
   else
     _err "Config exim4 server error, please report bug to us."


@@ -195,8 +195,8 @@ then rm -rf \"\$fn\"; echo \"Backup \$fn deleted as older than 180 days\"; fi; d
   fi
   if [ -n "$Le_Deploy_ssh_cafile" ]; then
     _pipe=">"
-    if [ "$Le_Deploy_ssh_cafile" = "$Le_Deploy_ssh_keyfile" ] \
-      || [ "$Le_Deploy_ssh_cafile" = "$Le_Deploy_ssh_certfile" ]; then
+    if [ "$Le_Deploy_ssh_cafile" = "$Le_Deploy_ssh_keyfile" ] ||
+      [ "$Le_Deploy_ssh_cafile" = "$Le_Deploy_ssh_certfile" ]; then
       # if filename is same as previous file then append.
       _pipe=">>"
     elif [ "$Le_Deploy_ssh_backup" = "yes" ]; then
@@ -222,9 +222,9 @@ then rm -rf \"\$fn\"; echo \"Backup \$fn deleted as older than 180 days\"; fi; d
   fi
   if [ -n "$Le_Deploy_ssh_fullchain" ]; then
     _pipe=">"
-    if [ "$Le_Deploy_ssh_fullchain" = "$Le_Deploy_ssh_keyfile" ] \
-      || [ "$Le_Deploy_ssh_fullchain" = "$Le_Deploy_ssh_certfile" ] \
-      || [ "$Le_Deploy_ssh_fullchain" = "$Le_Deploy_ssh_cafile" ]; then
+    if [ "$Le_Deploy_ssh_fullchain" = "$Le_Deploy_ssh_keyfile" ] ||
+      [ "$Le_Deploy_ssh_fullchain" = "$Le_Deploy_ssh_certfile" ] ||
+      [ "$Le_Deploy_ssh_fullchain" = "$Le_Deploy_ssh_cafile" ]; then
       # if filename is same as previous file then append.
       _pipe=">>"
     elif [ "$Le_Deploy_ssh_backup" = "yes" ]; then


@@ -65,9 +65,9 @@ vsftpd_deploy() {
   cp "$_vsftpd_conf" "$_backup_conf"
   _info "Modify vsftpd conf: $_vsftpd_conf"
-  if _setopt "$_vsftpd_conf" "rsa_cert_file" "=" "$_real_fullchain" \
-    && _setopt "$_vsftpd_conf" "rsa_private_key_file" "=" "$_real_key" \
-    && _setopt "$_vsftpd_conf" "ssl_enable" "=" "YES"; then
+  if _setopt "$_vsftpd_conf" "rsa_cert_file" "=" "$_real_fullchain" &&
+    _setopt "$_vsftpd_conf" "rsa_private_key_file" "=" "$_real_key" &&
+    _setopt "$_vsftpd_conf" "ssl_enable" "=" "YES"; then
     _info "Set config success!"
   else
     _err "Config vsftpd server error, please report bug to us."


@@ -222,10 +222,10 @@ _use_instance_role() {
 _use_metadata() {
   _aws_creds="$(
-    _get "$1" "" 1 \
-      | _normalizeJson \
-      | tr '{,}' '\n' \
-      | while read -r _line; do
+    _get "$1" "" 1 |
+      _normalizeJson |
+      tr '{,}' '\n' |
+      while read -r _line; do
         _key="$(echo "${_line%%:*}" | tr -d '"')"
         _value="${_line#*:}"
         _debug3 "_key" "$_key"
@@ -235,8 +235,8 @@ _use_metadata() {
         SecretAccessKey) echo "AWS_SECRET_ACCESS_KEY=$_value" ;;
         Token) echo "AWS_SESSION_TOKEN=$_value" ;;
       esac
-      done \
-        | paste -sd' ' -
+      done |
+        paste -sd' ' -
   )"
   _secure_debug "_aws_creds" "$_aws_creds"
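Only the pipe placement changes above, but the pipeline itself is easy to exercise in isolation: it splits the flattened metadata JSON on braces and commas, maps the three interesting keys to VAR=value pairs, and joins them onto a single line. A stand-alone approximation — _get and _normalizeJson are acme.sh helpers, so a hard-coded sample response stands in for them here:

  json='{"AccessKeyId":"AKIAEXAMPLE","SecretAccessKey":"secretEXAMPLE","Token":"tokenEXAMPLE"}'
  echo "$json" | tr '{,}' '\n' | while read -r _line; do
    _key="$(echo "${_line%%:*}" | tr -d '"')"   # text before the first colon, quotes stripped
    _value="${_line#*:}"                        # everything after the first colon
    case "$_key" in
      AccessKeyId) echo "AWS_ACCESS_KEY_ID=$_value" ;;
      SecretAccessKey) echo "AWS_SECRET_ACCESS_KEY=$_value" ;;
      Token) echo "AWS_SESSION_TOKEN=$_value" ;;
    esac
  done | paste -sd' ' -   # one line: AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... AWS_SESSION_TOKEN=...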


@@ -220,7 +220,7 @@ _azure_rest() {
   export _H2="accept: application/json"
   export _H3="Content-Type: application/json"
   # clear headers from previous request to avoid getting wrong http code on timeouts
-  :>"$HTTP_HEADER"
+  : >"$HTTP_HEADER"
   _debug "$ep"
   if [ "$m" != "GET" ]; then
     _secure_debug2 "data $data"
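The only change in this hunk is whitespace: shfmt prints a space between the ":" builtin and its redirection. Both spellings truncate $HTTP_HEADER, since ":" does nothing and the ">" redirection still opens the file for writing. A quick stand-alone illustration (the temp file is only for the example):

  tmp=$(mktemp)
  echo "stale headers" >"$tmp"
  : >"$tmp"          # no-op command, but the redirection truncates the file
  wc -c <"$tmp"      # prints 0
  rm -f "$tmp"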


@@ -115,9 +115,9 @@ dns_conoha_rm() {
     return 1
   fi
-  record_id=$(printf "%s" "$response" | _egrep_o '{[^}]*}' \
-    | grep '"type":"TXT"' | grep "\"data\":\"$txtvalue\"" | _egrep_o "\"id\":\"[^\"]*\"" \
-    | _head_n 1 | cut -d : -f 2 | tr -d \")
+  record_id=$(printf "%s" "$response" | _egrep_o '{[^}]*}' |
+    grep '"type":"TXT"' | grep "\"data\":\"$txtvalue\"" | _egrep_o "\"id\":\"[^\"]*\"" |
+    _head_n 1 | cut -d : -f 2 | tr -d \")
   if [ -z "$record_id" ]; then
     _err "Can not get record id to remove."
     return 1


@@ -18,23 +18,23 @@
 ########
 dns_cyon_add() {
-  _cyon_load_credentials \
-    && _cyon_load_parameters "$@" \
-    && _cyon_print_header "add" \
-    && _cyon_login \
-    && _cyon_change_domain_env \
-    && _cyon_add_txt \
-    && _cyon_logout
+  _cyon_load_credentials &&
+    _cyon_load_parameters "$@" &&
+    _cyon_print_header "add" &&
+    _cyon_login &&
+    _cyon_change_domain_env &&
+    _cyon_add_txt &&
+    _cyon_logout
 }
 dns_cyon_rm() {
-  _cyon_load_credentials \
-    && _cyon_load_parameters "$@" \
-    && _cyon_print_header "delete" \
-    && _cyon_login \
-    && _cyon_change_domain_env \
-    && _cyon_delete_txt \
-    && _cyon_logout
+  _cyon_load_credentials &&
+    _cyon_load_parameters "$@" &&
+    _cyon_print_header "delete" &&
+    _cyon_login &&
+    _cyon_change_domain_env &&
+    _cyon_delete_txt &&
+    _cyon_logout
 }
 #########################


@@ -67,14 +67,14 @@ _dns_do_list_rrs() {
     _err "getRRList origin ${_domain} failed"
     return 1
   fi
-  _rr_list="$(echo "${response}" \
-    | tr -d "\n\r\t" \
-    | sed -e 's/<item xsi:type="ns2:Map">/\n/g' \
-    | grep ">$(_regexcape "$fulldomain")</value>" \
-    | sed -e 's/<\/item>/\n/g' \
-    | grep '>id</key><value' \
-    | _egrep_o '>[0-9]{1,16}<' \
-    | tr -d '><')"
+  _rr_list="$(echo "${response}" |
+    tr -d "\n\r\t" |
+    sed -e 's/<item xsi:type="ns2:Map">/\n/g' |
+    grep ">$(_regexcape "$fulldomain")</value>" |
+    sed -e 's/<\/item>/\n/g' |
+    grep '>id</key><value' |
+    _egrep_o '>[0-9]{1,16}<' |
+    tr -d '><')"
   [ "${_rr_list}" ]
 }
@@ -120,10 +120,10 @@ _get_root() {
   i=1
   _dns_do_soap getDomainList
-  _all_domains="$(echo "${response}" \
-    | tr -d "\n\r\t " \
-    | _egrep_o 'domain</key><value[^>]+>[^<]+' \
-    | sed -e 's/^domain<\/key><value[^>]*>//g')"
+  _all_domains="$(echo "${response}" |
+    tr -d "\n\r\t " |
+    _egrep_o 'domain</key><value[^>]+>[^<]+' |
+    sed -e 's/^domain<\/key><value[^>]*>//g')"
   while true; do
     h=$(printf "%s" "$domain" | cut -d . -f $i-100)


@@ -303,10 +303,10 @@ _freedns_domain_id() {
     return 1
   fi
-  domain_id="$(echo "$htmlpage" | tr -d " \t\r\n\v\f" | sed 's/<tr>/@<tr>/g' | tr '@' '\n' \
-    | grep "<td>$search_domain</td>\|<td>$search_domain(.*)</td>" \
-    | sed -n 's/.*\(edit\.php?edit_domain_id=[0-9a-zA-Z]*\).*/\1/p' \
-    | cut -d = -f 2)"
+  domain_id="$(echo "$htmlpage" | tr -d " \t\r\n\v\f" | sed 's/<tr>/@<tr>/g' | tr '@' '\n' |
+    grep "<td>$search_domain</td>\|<td>$search_domain(.*)</td>" |
+    sed -n 's/.*\(edit\.php?edit_domain_id=[0-9a-zA-Z]*\).*/\1/p' |
+    cut -d = -f 2)"
   # The above beauty extracts domain ID from the html page...
   # strip out all blank space and new lines. Then insert newlines
   # before each table row <tr>
@@ -349,11 +349,11 @@ _freedns_data_id() {
     return 1
   fi
-  data_id="$(echo "$htmlpage" | tr -d " \t\r\n\v\f" | sed 's/<tr>/@<tr>/g' | tr '@' '\n' \
-    | grep "<td[a-zA-Z=#]*>$record_type</td>" \
-    | grep "<ahref.*>$search_domain</a>" \
-    | sed -n 's/.*\(edit\.php?data_id=[0-9a-zA-Z]*\).*/\1/p' \
-    | cut -d = -f 2)"
+  data_id="$(echo "$htmlpage" | tr -d " \t\r\n\v\f" | sed 's/<tr>/@<tr>/g' | tr '@' '\n' |
+    grep "<td[a-zA-Z=#]*>$record_type</td>" |
+    grep "<ahref.*>$search_domain</a>" |
+    sed -n 's/.*\(edit\.php?data_id=[0-9a-zA-Z]*\).*/\1/p' |
+    cut -d = -f 2)"
   # The above beauty extracts data ID from the html page...
   # strip out all blank space and new lines. Then insert newlines
   # before each table row <tr>


@@ -69,9 +69,9 @@ dns_gandi_livedns_rm() {
   _gandi_livedns_rest PUT \
     "domains/$_domain/records/$_sub_domain/TXT" \
-    "{\"rrset_ttl\": 300, \"rrset_values\": $_new_rrset_values}" \
-    && _contains "$response" '{"message": "DNS Record Created"}' \
-    && _info "Removing record $(__green "success")"
+    "{\"rrset_ttl\": 300, \"rrset_values\": $_new_rrset_values}" &&
+    _contains "$response" '{"message": "DNS Record Created"}' &&
+    _info "Removing record $(__green "success")"
 }
 #################### Private functions below ##################################
@@ -125,9 +125,9 @@ _dns_gandi_append_record() {
   fi
   _debug new_rrset_values "$_rrset_values"
   _gandi_livedns_rest PUT "domains/$_domain/records/$sub_domain/TXT" \
-    "{\"rrset_ttl\": 300, \"rrset_values\": $_rrset_values}" \
-    && _contains "$response" '{"message": "DNS Record Created"}' \
-    && _info "Adding record $(__green "success")"
+    "{\"rrset_ttl\": 300, \"rrset_values\": $_rrset_values}" &&
+    _contains "$response" '{"message": "DNS Record Created"}' &&
+    _info "Adding record $(__green "success")"
 }
 _dns_gandi_existing_rrset_values() {
@@ -145,8 +145,8 @@ _dns_gandi_existing_rrset_values() {
     return 1
   fi
   _debug "Already has TXT record."
-  _rrset_values=$(echo "$response" | _egrep_o 'rrset_values.*\[.*\]' \
-    | _egrep_o '\[".*\"]')
+  _rrset_values=$(echo "$response" | _egrep_o 'rrset_values.*\[.*\]' |
+    _egrep_o '\[".*\"]')
   return 0
 }


@@ -78,8 +78,8 @@ _dns_gcloud_execute_tr() {
   for i in $(seq 1 120); do
     if gcloud dns record-sets changes list \
       --zone="$managedZone" \
-      --filter='status != done' \
-      | grep -q '^.*'; then
+      --filter='status != done' |
+      grep -q '^.*'; then
       _info "_dns_gcloud_execute_tr: waiting for transaction to be comitted ($i/120)..."
       sleep 5
     else
@@ -137,11 +137,11 @@ _dns_gcloud_find_zone() {
   # List domains and find the zone with the deepest sub-domain (in case of some levels of delegation)
   if ! match=$(gcloud dns managed-zones list \
     --format="value(name, dnsName)" \
-    --filter="$filter" \
-    | while read -r dnsName name; do
+    --filter="$filter" |
+    while read -r dnsName name; do
       printf "%s\t%s\t%s\n" "$(echo "$name" | awk -F"." '{print NF-1}')" "$dnsName" "$name"
-    done \
-      | sort -n -r | _head_n 1 | cut -f2,3 | grep '^.*'); then
+    done |
+      sort -n -r | _head_n 1 | cut -f2,3 | grep '^.*'); then
     _err "_dns_gcloud_find_zone: Can't find a matching managed zone! Perhaps wrong project or gcloud credentials?"
     return 1
   fi


@@ -101,8 +101,8 @@ dns_he_rm() {
   body="$body&hosted_dns_editzone=1"
   body="$body&hosted_dns_delrecord=1"
   body="$body&hosted_dns_delconfirm=delete"
-  _post "$body" "https://dns.he.net/" \
-    | grep '<div id="dns_status" onClick="hideThis(this);">Successfully removed record.</div>' \
+  _post "$body" "https://dns.he.net/" |
+    grep '<div id="dns_status" onClick="hideThis(this);">Successfully removed record.</div>' \
     >/dev/null
   exit_code="$?"
   if [ "$exit_code" -eq 0 ]; then


@@ -123,10 +123,10 @@ _find_record() {
     return 1
   else
     _record_id=$(
-      echo "$response" \
-        | grep -o "{[^\{\}]*\"name\":\"$_record_name\"[^\}]*}" \
-        | grep "\"value\":\"$_record_value\"" \
-        | while read -r record; do
+      echo "$response" |
+        grep -o "{[^\{\}]*\"name\":\"$_record_name\"[^\}]*}" |
+        grep "\"value\":\"$_record_value\"" |
+        while read -r record; do
         # test for type and
         if [ -n "$(echo "$record" | _egrep_o '"type":"TXT"')" ]; then
           echo "$record" | _egrep_o '"id":"[^"]*"' | cut -d : -f 2 | tr -d \"


@@ -144,7 +144,7 @@ _netlify_rest() {
   export _H1="Content-Type: application/json"
   export _H2="Authorization: Bearer $token_trimmed"
-  :>"$HTTP_HEADER"
+  : >"$HTTP_HEADER"
   if [ "$m" != "GET" ]; then
     _debug data "$data"


@@ -136,11 +136,12 @@ dns_pleskxml_rm() {
   # Reduce output to one line per DNS record, filtered for TXT records with a record ID only (which they should all have)
   # Also strip out spaces between tags, redundant <data> and </data> group tags and any <self-closing/> tags
-  reclist="$(_api_response_split "$pleskxml_prettyprint_result" 'result' '<status>ok</status>' \
-    | sed 's# \{1,\}<\([a-zA-Z]\)#<\1#g;s#</\{0,1\}data>##g;s#<[a-z][^/<>]*/>##g' \
-    | grep "<site-id>${root_domain_id}</site-id>" \
-    | grep '<id>[0-9]\{1,\}</id>' \
-    | grep '<type>TXT</type>'
+  reclist="$(
+    _api_response_split "$pleskxml_prettyprint_result" 'result' '<status>ok</status>' |
+      sed 's# \{1,\}<\([a-zA-Z]\)#<\1#g;s#</\{0,1\}data>##g;s#<[a-z][^/<>]*/>##g' |
+      grep "<site-id>${root_domain_id}</site-id>" |
+      grep '<id>[0-9]\{1,\}</id>' |
+      grep '<type>TXT</type>'
   )"
   if [ -z "$reclist" ]; then
@@ -151,10 +152,11 @@ dns_pleskxml_rm() {
   _debug "Got list of DNS TXT records for root domain '$root_domain_name':"
   _debug "$reclist"
-  recid="$(_value "$reclist" \
-    | grep "<host>${fulldomain}.</host>" \
-    | grep "<value>${txtvalue}</value>" \
-    | sed 's/^.*<id>\([0-9]\{1,\}\)<\/id>.*$/\1/'
+  recid="$(
+    _value "$reclist" |
+      grep "<host>${fulldomain}.</host>" |
+      grep "<value>${txtvalue}</value>" |
+      sed 's/^.*<id>\([0-9]\{1,\}\)<\/id>.*$/\1/'
   )"
   if ! _value "$recid" | grep '^[0-9]\{1,\}$' >/dev/null; then
@@ -220,11 +222,11 @@ _countdots() {
 # Last line could change to <sed -n '/.../p'> instead, with suitable escaping of ['"/$],
 # if future Plesk XML API changes ever require extended regex
 _api_response_split() {
-  printf '%s' "$1" \
-    | sed 's/^ +//;s/ +$//' \
-    | tr -d '\n\r' \
-    | sed "s/<\/\{0,1\}$2>/${NEWLINE}/g" \
-    | grep "$3"
+  printf '%s' "$1" |
+    sed 's/^ +//;s/ +$//' |
+    tr -d '\n\r' |
+    sed "s/<\/\{0,1\}$2>/${NEWLINE}/g" |
+    grep "$3"
 }
 #################### Private functions below (DNS functions) ##################################
@@ -265,10 +267,11 @@ _call_api() {
     # - filter output to keep only lines like this: "SPACES<TAG>text</TAG>SPACES" (shouldn't be necessary with prettyprint but guarantees subsequent code is ok)
     # - then edit the 3 "useful" error tokens individually and remove closing tags on all lines
     # - then filter again to remove all lines not edited (which will be the lines not starting A-Z)
-    errtext="$(_value "$pleskxml_prettyprint_result" \
-      | grep '^ *<[a-z]\{1,\}>[^<]*<\/[a-z]\{1,\}> *$' \
-      | sed 's/^ *<status>/Status: /;s/^ *<errcode>/Error code: /;s/^ *<errtext>/Error text: /;s/<\/.*$//' \
-      | grep '^[A-Z]'
+    errtext="$(
+      _value "$pleskxml_prettyprint_result" |
+        grep '^ *<[a-z]\{1,\}>[^<]*<\/[a-z]\{1,\}> *$' |
+        sed 's/^ *<status>/Status: /;s/^ *<errcode>/Error code: /;s/^ *<errtext>/Error text: /;s/<\/.*$//' |
+        grep '^[A-Z]'
     )"
   fi