diff --git a/google-oauth2.sh b/google-oauth2.sh
index 01b9615..0dfd792 100755
--- a/google-oauth2.sh
+++ b/google-oauth2.sh
@@ -9,6 +9,8 @@
 # Set CLIENT_ID and CLIENT_SECRET and SCOPE
 # See SCOPES at https://developers.google.com/identity/protocols/oauth2/scopes#docsv1
 
+set -o errexit -o noclobber -o pipefail
+
 _short_help() {
     printf "
 No valid arguments provided.
@@ -35,10 +37,6 @@ else
     exit 1
 fi
 
-if ! _is_terminal; then
-    DEBUG="true"
-    export DEBUG
-fi
 _check_debug
 
 _print_center "justify" "Starting script.." "-"
@@ -49,8 +47,11 @@ SCOPE="https://www.googleapis.com/auth/drive"
 REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob"
 TOKEN_URL="https://accounts.google.com/o/oauth2/token"
 
+CONFIG="$(< "${HOME}/.google-drive-upload/google-drive-upload.configpath")" &> /dev/null || :
+CONFIG="${CONFIG:-${HOME}/.googledrive.conf}"
+
 # shellcheck source=/dev/null
-[[ -f ${HOME}/.googledrive.conf ]] && source "${HOME}"/.googledrive.conf
+[[ -f ${CONFIG} ]] && source "${CONFIG}"
 
 _print_center "justify" "Checking credentials.." "-"
 
@@ -77,7 +78,7 @@ if [[ ${1} = create ]]; then
     CODE="${CODE//[[:space:]]/}"
     if [[ -n ${CODE} ]]; then
-        RESPONSE="$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" ${TOKEN_URL})"
+        RESPONSE="$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" ${TOKEN_URL})" || :
 
         ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"
         REFRESH_TOKEN="$(_json_value refresh_token 1 1 <<< "${RESPONSE}")"
@@ -102,9 +103,9 @@ elif [[ ${1} = refresh ]]; then
     # Make a request on https://www.googleapis.com/oauth2/""${API_VERSION}""/tokeninfo?access_token=${ACCESS_TOKEN} url and check if the given token is valid, if not generate one.
     # Requirements: Refresh Token
     _get_token_and_update() {
-        RESPONSE="$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")"
+        RESPONSE="$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")" || :
         ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"
-        ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)"
+        ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)" || :
         "${UPDATE:-:}" ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
         "${UPDATE:-:}" ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"
     }
@@ -115,5 +116,6 @@ elif [[ ${1} = refresh ]]; then
         printf "Access Token: %s\n" "${ACCESS_TOKEN}"
     else
         _print_center "normal" "Refresh Token not set, use ${0} create to generate one." "="
+        exit 1
     fi
 fi
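The `|| :` suffixes above exist because the script now runs with `set -o errexit -o noclobber -o pipefail`: a failing `curl` or an empty pipeline would otherwise abort the run before the response can be inspected. A minimal standalone sketch of that pattern (the URL and messages are illustrative, not taken from the repository):

    #!/usr/bin/env bash
    set -o errexit -o noclobber -o pipefail

    # Guard the network call with "|| :" (a no-op) so a non-zero curl exit does
    # not trigger errexit, then handle the empty response explicitly.
    response="$(curl --compressed -s "https://www.example.com/token")" || :
    if [[ -z ${response} ]]; then
        printf "Error: empty response, cannot continue.\n" 1>&2
        exit 1
    fi
    printf "%s\n" "${response}"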
"=" + exit 1 fi fi diff --git a/install.sh b/install.sh index c3cacc5..cbdea6f 100755 --- a/install.sh +++ b/install.sh @@ -238,14 +238,16 @@ _get_latest_sha() { declare LATEST_SHA case "${1:-${TYPE}}" in branch) - LATEST_SHA="$(hash="$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000 | grep "Commit\\/" -m1)" && { + LATEST_SHA="$( + hash="$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000 | grep "Commit\\/" -m1 || :)" read -r firstline <<< "${hash}" && regex="(/.*<)" && [[ ${firstline} =~ ${regex} ]] && printf "%s\n" "${BASH_REMATCH[1]:1:-1}" - })" + )" ;; release) - LATEST_SHA="$(hash="$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}" | grep "=\"/""${3:-${REPO}}""/commit" -m1)" && { + LATEST_SHA="$( + hash="$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" read -r firstline <<< "${hash}" && : "${hash/*commit\//}" && printf "%s\n" "${_/\"*/}" - })" + )" ;; esac printf "%b" "${LATEST_SHA:+${LATEST_SHA}\n}" @@ -308,7 +310,7 @@ _json_value() { declare LC_ALL=C num { [[ ${2} =~ ^([0-9]+)+$ ]] && no_of_lines="${2}"; } || : { [[ ${3} =~ ^([0-9]+)+$ ]] && num="${3}"; } || { [[ ${3} != all ]] && num=1; } - grep -o "\"${1}\"\:.*" ${no_of_lines+-m ${no_of_lines}} | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num}"p + grep -o "\"${1}\"\:.*" ${no_of_lines:+-m ${no_of_lines}} | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num}"p || : } ################################################### @@ -764,6 +766,7 @@ _setup_arguments() { main() { _check_bash_version && _check_dependencies + set -o errexit -o noclobber -o pipefail _variables if [[ $* ]]; then diff --git a/sync.sh b/sync.sh index a6d1e99..10171ef 100755 --- a/sync.sh +++ b/sync.sh @@ -27,12 +27,12 @@ Options:\n -s | --service 'service name' - To generate systemd service file to setup background jobs on boot.\n -D | --debug - Display script command trace, use before all the flags to see maximum script trace.\n -h | --help - Display usage instructions.\n" "${0##*/}" "${0##*/}" "${0##*/}" "${0##*/}" - exit + exit 0 } _short_help() { printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit + exit 0 } ################################################### @@ -62,28 +62,28 @@ _get_job_info() { pid="${1}" input="${3:-$(grep "${pid}" "${SYNC_LIST}" || :)}" if [[ -n ${input} ]]; then - if _check_pid "${pid}"; then + if times="$(ps -p "${pid}" -o etimes --no-headers)"; then printf "\n%s\n" "PID: ${pid}" : "${input#*"|:_//_:|"}" && local_folder="${_/"|:_//_:|"*/}" printf "Local Folder: %s\n" "${local_folder}" printf "Drive Folder: %s\n" "${input/*"|:_//_:|"/}" - times="$(ps -p "${pid}" -o etimes --no-headers)" printf "Running Since: %s\n" "$(_display_time "${times}")" if [[ -n ${2} ]]; then - extra="$(ps -p "${pid}" -o %cpu,%mem --no-headers)" + extra="$(ps -p "${pid}" -o %cpu,%mem --no-headers || :)" printf "CPU usage:%s\n" "${extra% *}" printf "Memory usage: %s\n" "${extra##* }" _setup_loop_variables "${local_folder}" "${input/*"|:_//_:|"/}" printf "Success: %s\n" "$(_count < "${SUCCESS_LOG}")" printf "Failed: %s\n" "$(_count < "${ERROR_LOG}")" fi - return 0 + return_status=0 else - return 1 + return_status=1 fi else - return 11 + return_status=11 fi + return 0 } 
diff --git a/sync.sh b/sync.sh
index a6d1e99..10171ef 100755
--- a/sync.sh
+++ b/sync.sh
@@ -27,12 +27,12 @@ Options:\n
   -s | --service 'service name' - To generate systemd service file to setup background jobs on boot.\n
   -D | --debug - Display script command trace, use before all the flags to see maximum script trace.\n
   -h | --help - Display usage instructions.\n" "${0##*/}" "${0##*/}" "${0##*/}" "${0##*/}"
-    exit
+    exit 0
 }
 
 _short_help() {
     printf "No valid arguments provided, use -h/--help flag to see usage.\n"
-    exit
+    exit 0
 }
 
 ###################################################
@@ -62,28 +62,28 @@ _get_job_info() {
     pid="${1}"
     input="${3:-$(grep "${pid}" "${SYNC_LIST}" || :)}"
     if [[ -n ${input} ]]; then
-        if _check_pid "${pid}"; then
+        if times="$(ps -p "${pid}" -o etimes --no-headers)"; then
             printf "\n%s\n" "PID: ${pid}"
             : "${input#*"|:_//_:|"}" && local_folder="${_/"|:_//_:|"*/}"
             printf "Local Folder: %s\n" "${local_folder}"
             printf "Drive Folder: %s\n" "${input/*"|:_//_:|"/}"
-            times="$(ps -p "${pid}" -o etimes --no-headers)"
             printf "Running Since: %s\n" "$(_display_time "${times}")"
             if [[ -n ${2} ]]; then
-                extra="$(ps -p "${pid}" -o %cpu,%mem --no-headers)"
+                extra="$(ps -p "${pid}" -o %cpu,%mem --no-headers || :)"
                 printf "CPU usage:%s\n" "${extra% *}"
                 printf "Memory usage: %s\n" "${extra##* }"
                 _setup_loop_variables "${local_folder}" "${input/*"|:_//_:|"/}"
                 printf "Success: %s\n" "$(_count < "${SUCCESS_LOG}")"
                 printf "Failed: %s\n" "$(_count < "${ERROR_LOG}")"
             fi
-            return 0
+            return_status=0
         else
-            return 1
+            return_status=1
        fi
     else
-        return 11
+        return_status=11
     fi
+    return 0
 }
 
 ###################################################
@@ -97,16 +97,17 @@ _get_job_info() {
 ###################################################
 _remove_job() {
     declare pid="${1}" input local_folder drive_folder
-    input="$(grep "${pid}" "${SYNC_LIST}")"
+    input="$(grep "${pid}" "${SYNC_LIST}" || :)"
     : "${input#*"|:_//_:|"}" && local_folder="${_/"|:_//_:|"*/}"
     drive_folder="${input/*"|:_//_:|"/}"
-    new_list="$(grep -v "${pid}" "${SYNC_LIST}")"
+    new_list="$(grep -v "${pid}" "${SYNC_LIST}" || :)"
     printf "%s\n" "${new_list}" >| "${SYNC_LIST}"
     rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder}${local_folder}"
     # Cleanup dir if empty
     if find "${SYNC_DETAIL_DIR:?}/${drive_folder}" -type f &> /dev/null; then
         rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder}"
     fi
+    return 0
 }
 
 ###################################################
@@ -119,7 +120,7 @@ _remove_job() {
 ###################################################
 _kill_job() {
     declare pid="${1}"
-    kill -9 "${pid}" &> /dev/null
+    kill -9 "${pid}" &> /dev/null || :
     _remove_job "${pid}"
     printf "Killed.\n"
 }
@@ -141,11 +142,12 @@ _show_jobs() {
         if [[ -n ${line} ]]; then
             : "${line/"|:_//_:|"*/}" && pid="${_/*: /}"
             _get_job_info "${pid}" "${1}" "${line}"
-            { [[ ${?} = 1 ]] && _remove_job "${pid}"; } || { ((total += 1)) && no_task="printf"; }
+            { [[ ${return_status} = 1 ]] && _remove_job "${pid}"; } || { ((total += 1)) && no_task="printf"; }
         fi
     done 4< "${SYNC_LIST}"
     printf "\nTotal Jobs Running: %s\n" "${total}"
-    [[ v${1} = v ]] && "${no_task:-:}" "For more info: %s -j/--jobs v/verbose\n" "${0##*/}"
+    [[ -n ${1} ]] && "${no_task:-:}" "For more info: %s -j/--jobs v/verbose\n" "${0##*/}"
+    return 0
 }
 
 ###################################################
@@ -201,6 +203,7 @@ _check_and_upload() {
     else
         all+=(*)
     fi
+
     mapfile -t final <<< "$(_remove_array_duplicates "${all[@]}")"
 
     mapfile -t new_files <<< "$(diff \
@@ -252,15 +255,16 @@ _check_existing_loop() {
     _setup_loop_variables "${FOLDER}" "${GDRIVE_FOLDER}"
     _setup_loop_files
     if [[ -z ${PID} ]]; then
-        return 0
+        return_status=0
     elif _check_pid "${PID}"; then
-        return 1
+        return_status=1
     else
         _remove_job "${PID}"
         _setup_loop_variables "${FOLDER}" "${GDRIVE_FOLDER}"
         _setup_loop_files
-        return 2
+        return_status=2
     fi
+    return 0
 }
 
 ###################################################
@@ -284,8 +288,9 @@ _start_new_loop() {
         printf "%b\n" "Job started.\nLocal Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}"
         printf "%s\n" "PID: ${PID}"
         printf "%b\n" "PID: ${PID}|:_//_:|${FOLDER}|:_//_:|${GDRIVE_FOLDER}" >> "${SYNC_LIST}"
-        { [[ -n ${SHOW_LOGS} ]] && tail -f "${LOGS}"; } || :
+        [[ -n ${SHOW_LOGS} ]] && tail -f "${LOGS}"
     fi
+    return 0
 }
 
 ###################################################
@@ -321,9 +326,8 @@ _do_job() {
             for pid in "${ALL_PIDS[@]}"; do
                 if [[ ${JOB_TYPE} =~ INFO ]]; then
                     _get_job_info "${pid}" more
-                    status="${?}"
-                    if [[ ${status} != 0 ]]; then
-                        { [[ ${status} = 1 ]] && _remove_job "${pid}"; } || :
+                    if [[ ${return_status} != 0 ]]; then
+                        [[ ${return_status} = 1 ]] && _remove_job "${pid}"
                         printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2
                     fi
                 fi
@@ -341,11 +345,10 @@ _do_job() {
                 fi
                 if [[ ${JOB_TYPE} =~ KILL ]]; then
                     _get_job_info "${pid}"
-                    status="${?}"
-                    if [[ ${status} = 0 ]]; then
+                    if [[ ${return_status} = 0 ]]; then
                         _kill_job "${pid}"
                     else
-                        { [[ ${status} = 1 ]] && _remove_job "${pid}"; } || :
+                        [[ ${return_status} = 1 ]] && _remove_job "${pid}"
                         printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2
                     fi
                 fi
@@ -355,6 +358,7 @@ _do_job() {
             fi
             ;;
     esac
+    return 0
 }
 
 ###################################################
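Under `set -o errexit`, a helper that reports its result through a non-zero `return` would abort the caller unless every call site is guarded, which is why the functions above now record the outcome in `return_status` and always return 0. A self-contained sketch of the same idea (`kill -0` stands in for the `ps` check used in the script):

    set -o errexit

    _job_running() {
        declare pid="${1}"
        # Store the verdict instead of returning it, so the caller can test
        # ${return_status} without tripping errexit.
        if kill -0 "${pid}" &> /dev/null; then
            return_status=0
        else
            return_status=1
        fi
        return 0
    }

    _job_running "$$"
    [[ ${return_status} = 0 ]] && printf "PID %s is alive.\n" "$$"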
@@ -370,12 +374,13 @@ _do_job()
 ###################################################
 _setup_arguments() {
     [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
-    declare -g SYNC_TIME_TO_SLEEP ARGS COMMAND_NAME DEBUG GDRIVE_FOLDER KILL SHOW_LOGS
+    unset SYNC_TIME_TO_SLEEP ARGS COMMAND_NAME DEBUG GDRIVE_FOLDER KILL SHOW_LOGS
 
     _check_longoptions() {
-        { [[ -z ${2} ]] &&
+        [[ -z ${2} ]] &&
             printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' \
-                "${0##*/}" "${1}" "${0##*/}" && exit 1; } || :
+                "${0##*/}" "${1}" "${0##*/}" && exit 1
+        return 0
     }
 
     while [[ $# -gt 0 ]]; do
@@ -393,7 +398,7 @@ _setup_arguments() {
                 ARGS+=" -C ${GDRIVE_FOLDER} "
                 ;;
             -j | --jobs)
-                { [[ ${2} = v* ]] && SHOW_JOBS_VERBOSE="true" && shift; } || :
+                [[ ${2} = v* ]] && SHOW_JOBS_VERBOSE="true" && shift
                 JOB=(SHOW_JOBS)
                 ;;
             -p | --pid)
@@ -424,7 +429,7 @@ _setup_arguments() {
             -t | --time)
                 _check_longoptions "${1}" "${2}"
                 if [[ ${2} =~ ^([0-9]+)+$ ]]; then
-                    { [[ ${2} = default* ]] && UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config"; } || :
+                    [[ ${2} = default* ]] && UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config"
                     TO_SLEEP="${2/default=/}" && shift
                 else
                     printf "-t/--time only takes positive integers as arguments, min = 1, max = infinity.\n"
@@ -433,7 +438,7 @@ _setup_arguments() {
                 ;;
             -a | --arguments)
                 _check_longoptions "${1}" "${2}"
-                { [[ ${2} = default* ]] && UPDATE_DEFAULT_ARGS="_update_config"; } || :
+                [[ ${2} = default* ]] && UPDATE_DEFAULT_ARGS="_update_config"
                 ARGS+="${2/default=/} " && shift
                 ;;
             -fg | --foreground)
@@ -489,6 +494,7 @@ _setup_arguments() {
     fi
 
     mapfile -t FINAL_INPUT_ARRAY <<< "$(_remove_array_duplicates "${FINAL_INPUT_ARRAY[@]}")"
+    return 0
 }
 
 ###################################################
@@ -502,7 +508,7 @@ _setup_arguments() {
 # source CONFIG, update default values if required
 ###################################################
 _config_variables() {
-    if [[ -f "${INFO_PATH}/google-drive-upload.info" ]]; then
+    if [[ -r "${INFO_PATH}/google-drive-upload.info" ]]; then
         # shellcheck source=/dev/null
         source "${INFO_PATH}/google-drive-upload.info"
     else
@@ -532,6 +538,7 @@ _config_variables() {
     ARGS+=" ${SYNC_DEFAULT_ARGS:-} "
     "${UPDATE_DEFAULT_ARGS:-:}" SYNC_DEFAULT_ARGS " ${ARGS} " "${CONFIG}"
     "${UPDATE_DEFAULT_TIME_TO_SLEEP:-:}" SYNC_TIME_TO_SLEEP "${SYNC_TIME_TO_SLEEP}" "${CONFIG}"
+    return 0
 }
 
 ###################################################
@@ -613,8 +620,7 @@ done'
         fi
         cd "${FOLDER}" || exit 1
         _check_existing_loop
-        status="$?"
-        case "${status}" in
+        case "${return_status}" in
             0 | 2)
                 _start_new_loop
                 ;;
@@ -630,16 +636,19 @@ done'
                     _kill_job "${PID}"
                     exit
                 fi
-                { [[ -n ${SHOW_LOGS} ]] && tail -f "${LOGS}"; } || :
+                [[ -n ${SHOW_LOGS} ]] && tail -f "${LOGS}"
                 ;;
         esac
         cd "${CURRENT_FOLDER}" || exit 1
     done
+    return 0
 }
 
 main() {
     [[ $# = 0 ]] && _short_help
 
+    set -o errexit -o noclobber -o pipefail
+
     UTILS_FILE="${UTILS_FILE:-./utils.sh}"
     if [[ -r ${UTILS_FILE} ]]; then
         # shellcheck source=/dev/null
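`main()` now enables `set -o errexit -o noclobber -o pipefail` only after the help/argument checks, and the existing `>|` redirections are what keep intentional overwrites (such as rewriting `"${SYNC_LIST}"`) working once `noclobber` is on. A small sketch of that interaction (the file name is illustrative):

    set -o noclobber

    list="$(mktemp)"                     # mktemp creates the file, so it already exists
    printf "old\n" > "${list}" 2> /dev/null ||
        printf "plain > refused: noclobber protects existing files\n"
    printf "new\n" >| "${list}"          # >| explicitly allows the overwrite
    cat "${list}"                        # -> new
    rm -f "${list}"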
"-" # shellcheck source=/dev/null - if [[ -f "${HOME}/.google-drive-upload/google-drive-upload.info" ]]; then + if [[ -r "${HOME}/.google-drive-upload/google-drive-upload.info" ]]; then source "${HOME}/.google-drive-upload/google-drive-upload.info" fi declare repo="${REPO:-labbots/google-drive-upload}" type_value="${TYPE_VALUE:-latest}" @@ -86,7 +86,7 @@ _update() { # Result: read description ################################################### _version_info() { - if [[ -f "${HOME}/.google-drive-upload/google-drive-upload.info" ]]; then + if [[ -r "${HOME}/.google-drive-upload/google-drive-upload.info" ]]; then printf "%s\n" "$(< "${HOME}/.google-drive-upload/google-drive-upload.info")" else _print_center "justify" "google-drive-upload is not installed system wide." "=" @@ -116,9 +116,10 @@ _drive_info() { search_response="$(curl --compressed -s \ -H "Authorization: Bearer ${token}" \ - "${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=${fetch}&supportsAllDrives=true")" + "${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=${fetch}&supportsAllDrives=true")" || : printf "%s\n" "${search_response}" + return 0 } ################################################### @@ -143,13 +144,14 @@ _check_existing_file() { search_response="$(curl --compressed -s \ -H "Authorization: Bearer ${token}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true")" + "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true")" || : id="$(_json_value id 1 1 <<< "${search_response}")" { [[ -z ${id} ]] && _json_value message 1 1 <<< "${search_response}" 1>&2 && return 1; } || { printf "%s\n" "${id}" } + return 0 } ################################################### @@ -174,7 +176,7 @@ _create_directory() { search_response="$(curl --compressed -s \ -H "Authorization: Bearer ${token}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true")" + "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true")" || : folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)" @@ -186,12 +188,13 @@ _create_directory() { -H "Authorization: Bearer ${token}" \ -H "Content-Type: application/json; charset=UTF-8" \ -d "${create_folder_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true")" + "${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true")" || : folder_id="$(_json_value id 1 1 <<< "${create_folder_response}")" fi { [[ -z ${folder_id} ]] && _json_value id 1 1 <<< "${create_folder_response}" 1>&2 && return 1; } || { printf "%s\n" "${folder_id}" } + return 0 } ################################################### @@ -243,6 +246,24 @@ _upload_file() { UPLOAD_STATUS="ERROR" && export UPLOAD_STATUS # Send a error status, used in folder uploads. } + _collect_file_info() { + FILE_ID="${1:-$(printf "%s\n" "${upload_body}" | _json_value id 1 1)}" + FILE_LINK="${FILE_ID/*/https://drive.google.com/open?id=${FILE_ID}}" + # Log to the filename provided with -i/--save-id flag. + if [[ -n ${LOG_FILE_ID} && ! 
@@ -253,7 +274,6 @@ _upload_file() {
     if [[ -n ${existing_file_id} ]]; then
         if [[ -n ${SKIP_DUPLICATES} ]]; then
             SKIP_DUPLICATES_FILE_ID="${existing_file_id}"
-            FILE_LINK="${SKIP_DUPLICATES_FILE_ID/${SKIP_DUPLICATES_FILE_ID}/https://drive.google.com/open?id=${SKIP_DUPLICATES_FILE_ID}}"
         else
             request_method="PATCH"
             url="${API_URL}/upload/drive/${API_VERSION}/files/${existing_file_id}?uploadType=resumable&supportsAllDrives=true"
@@ -268,6 +288,7 @@ _upload_file() {
 
     if [[ -n ${SKIP_DUPLICATES_FILE_ID} ]]; then
         # Stop upload if already exists ( -d/--skip-duplicates )
+        _collect_file_info "${SKIP_DUPLICATES_FILE_ID}"
         "${QUIET:-_print_center}" "justify" "${slug}" " already exists." "="
     else
         # Set proper variables for creating files
@@ -290,8 +311,9 @@ _upload_file() {
             -H "X-Upload-Content-Length: ${inputsize}" \
             -d "$postdata" \
             "${url}" \
-            -D -)"
+            -D -)" || :
         uploadlink="$(read -r firstline <<< "${uploadlink/*[L,l]ocation: /}" && printf "%s\n" "${firstline//$'\r'/}")"
+        return 0
     }
 
     # Curl command to push the file to google drive.
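The `uploadlink=` line above pulls the resumable-session URI out of the response headers that `curl ... -D -` prints. A standalone sketch of that extraction against a canned header block (the URL and upload_id are invented for the example):

    headers=$'HTTP/2 200\r\nlocation: https://www.googleapis.com/upload/drive/v3/files?upload_id=EXAMPLE\r\ncontent-length: 0\r\n'
    # Strip everything up to "Location: " / "location: ", keep the first line,
    # and drop the trailing carriage return that HTTP headers carry.
    uploadlink="$(read -r firstline <<< "${headers/*[L,l]ocation: /}" && printf "%s\n" "${firstline//$'\r'/}")"
    printf "%s\n" "${uploadlink}"
    # -> https://www.googleapis.com/upload/drive/v3/files?upload_id=EXAMPLE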
@@ -308,24 +330,8 @@ _upload_file() {
             -o- \
             --url "${uploadlink}" \
             --globoff \
-            ${CURL_ARGS})"
-    }
-
-    _collect_file_info() {
-        FILE_LINK="$(: "$(printf "%s\n" "${upload_body}" | _json_value id 1 1)" && printf "%s\n" "${_/$_/https://drive.google.com/open?id=$_}")"
-        FILE_ID="$(printf "%s\n" "${upload_body}" | _json_value id 1 1)"
-        # Log to the filename provided with -i/--save-id flag.
-        if [[ -n ${LOG_FILE_ID} && ! -d ${LOG_FILE_ID} ]]; then
-            # shellcheck disable=SC2129
-            # https://github.com/koalaman/shellcheck/issues/1202#issuecomment-608239163
-            {
-                printf "%s\n" "Link: ${FILE_LINK}"
-                : "$(printf "%s\n" "${upload_body}" | _json_value name 1 1)" && printf "%s\n" "${_/*/Name: $_}"
-                : "$(printf "%s\n" "${FILE_ID}")" && printf "%s\n" "${_/*/ID: $_}"
-                : "$(printf "%s\n" "${upload_body}" | _json_value mimeType 1 1)" && printf "%s\n" "${_/*/Type: $_}"
-                printf '\n'
-            } >> "${LOG_FILE_ID}"
-        fi
+            ${CURL_ARGS})" || :
+        return 0
     }
 
     _normal_logging() {
@@ -333,15 +339,18 @@ _upload_file() {
             for _ in {1..3}; do _clear_line 1; done
         fi
         "${QUIET:-_print_center}" "justify" "${slug} " "| ${readable_size} | ${string}" "="
+        return 0
     }
 
     # Used for resuming interrupted uploads
     _log_upload_session() {
-        { [[ ${inputsize} -gt 1000000 ]] && printf "%s\n" "${uploadlink}" >| "${__file}"; } || :
+        [[ ${inputsize} -gt 1000000 ]] && printf "%s\n" "${uploadlink}" >| "${__file}"
+        return 0
     }
 
     _remove_upload_session() {
         rm -f "${__file}"
+        return 0
     }
 
     _full_upload() {
@@ -359,20 +368,21 @@ _upload_file() {
         else
             _error_logging
         fi
+        return 0
     }
 
     __file="${HOME}/.google-drive-upload/${slug}__::__${folder_id}__::__${inputsize}"
     # https://developers.google.com/drive/api/v3/manage-uploads
     if [[ -r "${__file}" ]]; then
         uploadlink="$(< "${__file}")"
-        http_code="$(curl --compressed -s -X PUT "${uploadlink}" --write-out %"{http_code}")"
+        http_code="$(curl --compressed -s -X PUT "${uploadlink}" --write-out %"{http_code}")" || :
         if [[ ${http_code} = "308" ]]; then # Active Resumable URI give 308 status
             uploaded_range="$(: "$(curl --compressed -s \
                 -X PUT \
                 -H "Content-Range: bytes */${inputsize}" \
                 --url "${uploadlink}" \
                 --globoff \
-                -D -)" && : "$(printf "%s\n" "${_/*[R,r]ange: bytes=0-/}")" && read -r firstline <<< "$_" && printf "%s\n" "${firstline//$'\r'/}")"
+                -D - || :)" && : "$(printf "%s\n" "${_/*[R,r]ange: bytes=0-/}")" && read -r firstline <<< "$_" && printf "%s\n" "${firstline//$'\r'/}")"
             if [[ ${uploaded_range} =~ (^[0-9]) ]]; then
                 content_range="$(printf "bytes %s-%s/%s\n" "$((uploaded_range + 1))" "$((inputsize - 1))" "${inputsize}")"
                 content_length="$((inputsize - $((uploaded_range + 1))))"
@@ -419,6 +429,7 @@ _upload_file() {
             _full_upload
         fi
     fi
+    return 0
 }
 
 ###################################################
@@ -443,7 +454,7 @@ _clone_file() {
     [[ $# -lt 4 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
     declare job="${1}" file_id="${2}" file_root_id="${3}" token="${4}" name="${5}" size="${6}"
     declare clone_file_post_data clone_file_response string readable_size
-    { [[ -z ${parallel} ]] && CURL_ARGS="-s"; } || :
+    [[ -z ${parallel} ]] && CURL_ARGS="-s"
     if [[ ${job} = update ]]; then
         declare existing_file_id
         # Check if file actually exists.
@@ -459,10 +470,10 @@ _clone_file() {
                 _print_center "justify" "Overwriting file.." "-"
                 clone_file_post_data="$(_drive_info "${existing_file_id}" "parents,writersCanShare" "${token}")"
                 if [[ ${existing_file_id} != "${file_id}" ]]; then
-                    curl --compressed \
+                    curl -s --compressed \
                         -X DELETE \
                         -H "Authorization: Bearer ${token}" \
-                        "${API_URL}/drive/${API_VERSION}/files/${existing_file_id}?supportsAllDrives=true" &> /dev/null
+                        "${API_URL}/drive/${API_VERSION}/files/${existing_file_id}?supportsAllDrives=true" &> /dev/null || :
                     string="Updated"
                 else
                     FILE_ID="${existing_file_id}"
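For context on the 308 branch above: a resumable session answers a `Content-Range: bytes */<size>` probe with status 308 plus a `Range: bytes=0-N` header, and the script only has to upload what follows byte N. A sketch of that resume arithmetic with made-up numbers:

    inputsize=1048576      # total file size in bytes
    uploaded_range=524287  # last byte index the session already has (from the Range header)
    content_range="$(printf "bytes %s-%s/%s\n" "$((uploaded_range + 1))" "$((inputsize - 1))" "${inputsize}")"
    content_length="$((inputsize - $((uploaded_range + 1))))"
    printf "Content-Range: %s\nContent-Length: %s\n" "${content_range}" "${content_length}"
    # -> Content-Range: bytes 524288-1048575/1048576
    # -> Content-Length: 524288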
"-"; } || : + [[ -z ${parallel} ]] && _print_center "justify" "Cloning file.." "-" clone_file_post_data="{\"parents\": [\"${file_root_id}\"]}" string="Cloned" fi else - { [[ -z ${parallel} ]] && _print_center "justify" "Cloning file.." "-"; } || : + [[ -z ${parallel} ]] && _print_center "justify" "Cloning file.." "-" clone_file_post_data="{\"parents\": [\"${file_root_id}\"]}" string="Cloned" fi readable_size="$(_bytes_to_human "${size}")" - { [[ -z ${parallel} ]] && _print_center "justify" "${name} " "| ${readable_size}" "="; } || : + [[ -z ${parallel} ]] && _print_center "justify" "${name} " "| ${readable_size}" "=" clone_file_response="$(curl --compressed \ -X POST \ -H "Authorization: Bearer ${token}" \ -H "Content-Type: application/json; charset=UTF-8" \ -d "${clone_file_post_data}" \ "${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true" \ - ${CURL_ARGS})" - { [[ -z ${parallel} ]] && for _ in {1..2}; do _clear_line 1; done; } || : + ${CURL_ARGS})" || : + [[ -z ${parallel} ]] && for _ in {1..2}; do _clear_line 1; done if [[ -n ${clone_file_response} ]]; then FILE_LINK="$(: "$(printf "%s\n" "${clone_file_response}" | _json_value id 1 1)" && printf "%s\n" "${_/$_/https://drive.google.com/open?id=$_}")" FILE_ID="$(printf "%s\n" "${clone_file_response}" | _json_value id 1 1)" @@ -510,6 +521,7 @@ _clone_file() { "${QUIET:-_print_center}" "justify" "ERROR" ", ${slug} not ${string}." "=" 1>&2 && [[ -z ${parallel} ]] && printf "\n\n\n" 1>&2 UPLOAD_STATUS="ERROR" && export UPLOAD_STATUS # Send a error status, used in folder uploads. fi + return 0 } ################################################### @@ -538,12 +550,13 @@ _share_id() { else share_post_data="{\"role\":\"${role}\",\"type\":\"${type}\"}" fi + share_response="$(curl --compressed -s \ -X POST \ -H "Authorization: Bearer ${token}" \ -H "Content-Type: application/json; charset=UTF-8" \ -d "${share_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files/${id}/permissions")" + "${API_URL}/drive/${API_VERSION}/files/${id}/permissions")" || : share_id="$(_json_value id 1 1 <<< "${share_response}")" _clear_line 1 @@ -573,13 +586,14 @@ _setup_arguments() { CURL_ARGS="-#" INFO_PATH="${HOME}/.google-drive-upload" CONFIG="$(< "${INFO_PATH}/google-drive-upload.configpath")" &> /dev/null || : + CONFIG="${CONFIG:-${HOME}/.googledrive.conf}" # Grab the first and second argument ( if 1st argument isn't a drive url ) and shift, only if ${1} doesn't contain -. 
@@ -573,13 +586,14 @@ _setup_arguments() {
     CURL_ARGS="-#"
     INFO_PATH="${HOME}/.google-drive-upload"
     CONFIG="$(< "${INFO_PATH}/google-drive-upload.configpath")" &> /dev/null || :
+    CONFIG="${CONFIG:-${HOME}/.googledrive.conf}"
 
     # Grab the first and second argument ( if 1st argument isn't a drive url ) and shift, only if ${1} doesn't contain -.
     if [[ ${1} != -* ]]; then
         if [[ ${1} =~ (drive.google.com|docs.google.com) ]]; then
-            { ID_INPUT_ARRAY+=("$(_extract_id "${1}")") && shift && [[ ${1} != -* ]] && FOLDER_INPUT="${1}" && shift; } || :
+            ID_INPUT_ARRAY+=("$(_extract_id "${1}")") && shift && [[ ${1} != -* ]] && FOLDER_INPUT="${1}" && shift
         else
-            { LOCAL_INPUT_ARRAY+=("${1}") && shift && [[ ${1} != -* ]] && FOLDER_INPUT="${1}" && shift; } || :
+            LOCAL_INPUT_ARRAY+=("${1}") && shift && [[ ${1} != -* ]] && FOLDER_INPUT="${1}" && shift
         fi
     fi
 
@@ -592,15 +606,18 @@ _setup_arguments() {
     TOKEN_URL="https://accounts.google.com/o/oauth2/token"
 
     _check_config() {
-        { [[ ${1} = default* ]] && UPDATE_DEFAULT_CONFIG="true"; } || :
+        [[ ${1} = default* ]] && UPDATE_DEFAULT_CONFIG="true"
        { [[ -r ${2} ]] && CONFIG="${2}"; } || {
            printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1
        }
+        return 0
     }
 
     _check_longoptions() {
-        { [[ -z ${2} ]] &&
-            printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1; } || :
+        [[ -z ${2} ]] &&
+            printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" &&
+            exit 1
+        return 0
     }
 
     while [[ $# -gt 0 ]]; do
@@ -628,7 +645,7 @@ _setup_arguments() {
             -r | --root-dir)
                 _check_longoptions "${1}" "${2}"
                 ROOTDIR="${2/default=/}"
-                { [[ ${2} = default* ]] && UPDATE_DEFAULT_ROOTDIR="_update_config"; } || :
+                [[ ${2} = default* ]] && UPDATE_DEFAULT_ROOTDIR="_update_config"
                 shift
                 ;;
             -z | --config)
@@ -652,7 +669,7 @@ _setup_arguments() {
                         exit 1
                         ;;
                     *)
-                        [[ ${NO_OF_PARALLEL_JOBS} -gt 10 ]] && { NO_OF_PARALLEL_JOBS=10 || NO_OF_PARALLEL_JOBS="${2}"; }
+                        { [[ ${NO_OF_PARALLEL_JOBS} -gt 10 ]] && NO_OF_PARALLEL_JOBS=10; } || NO_OF_PARALLEL_JOBS="${2}"
                         ;;
                 esac
                 PARALLEL_UPLOAD="true" && shift
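The regrouped parallel-jobs clamp is a genuine logic fix: in the old `[[ ... ]] && { NO_OF_PARALLEL_JOBS=10 || NO_OF_PARALLEL_JOBS="${2}"; }` the fallback assignment could never run, because the first assignment always succeeds. A sketch of the corrected grouping (`requested` stands in for `${2}`):

    requested=25
    { [[ ${requested} -gt 10 ]] && NO_OF_PARALLEL_JOBS=10; } || NO_OF_PARALLEL_JOBS="${requested}"
    printf "%s\n" "${NO_OF_PARALLEL_JOBS}"   # -> 10 (clamped)

    requested=4
    { [[ ${requested} -gt 10 ]] && NO_OF_PARALLEL_JOBS=10; } || NO_OF_PARALLEL_JOBS="${requested}"
    printf "%s\n" "${NO_OF_PARALLEL_JOBS}"   # -> 4 (taken as given)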
@@ -740,11 +757,13 @@ _setup_arguments() {
     fi
 
     # Get foldername, prioritise the input given by -C/--create-dir option.
-    { [[ -n ${FOLDER_INPUT} && -z ${FOLDERNAME} ]] && FOLDERNAME="${FOLDER_INPUT}"; } || :
+    [[ -n ${FOLDER_INPUT} && -z ${FOLDERNAME} ]] && FOLDERNAME="${FOLDER_INPUT}"
+
+    [[ -n ${VERBOSE_PROGRESS} && -n ${VERBOSE} ]] && unset "${VERBOSE}"
 
-    { [[ -n ${VERBOSE_PROGRESS} && -n ${VERBOSE} ]] && unset "${VERBOSE}"; } || :
+    [[ -n ${QUIET} ]] && CURL_ARGS="-s"
 
-    { [[ -n ${QUIET} ]] && CURL_ARGS="-s"; } || :
+    return 0
 }
 
 ###################################################
@@ -756,8 +775,9 @@ _setup_arguments() {
 # Result: read description
 ###################################################
 _setup_tempfile() {
-    type -p mktemp &> /dev/null && { TMPFILE="$(mktemp -u)" || TMPFILE="${PWD}/$((RANDOM * 2)).LOG"; }
-    trap 'rm -f "${TMPFILE}"*' EXIT
+    { type -p mktemp &> /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="${PWD}/$((RANDOM * 2)).LOG"
+    trap 'rm -f "${TMPFILE}"* ; exit' INT TERM
+    return 0
 }
 
 ###################################################
@@ -805,7 +825,7 @@ _check_credentials() {
         CODE="${CODE//[[:space:]]/}"
         if [[ -n ${CODE} ]]; then
             RESPONSE="$(curl --compressed -s -X POST \
-                --data "code=${CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")"
+                --data "code=${CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")" || :
 
             ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"
             REFRESH_TOKEN="$(_json_value refresh_token 1 1 <<< "${RESPONSE}")"
@@ -830,7 +850,7 @@ _check_credentials() {
     # Make a request on https://www.googleapis.com/oauth2/""${API_VERSION}""/tokeninfo?access_token=${ACCESS_TOKEN} url and check if the given token is valid, if not generate one.
     # Requirements: Refresh Token
     _get_token_and_update() {
-        RESPONSE="$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")"
+        RESPONSE="$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")" || :
         ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"
         if [[ -n ${ACCESS_TOKEN} ]]; then
             ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)"
@@ -841,10 +861,12 @@ _check_credentials() {
             printf "%s\n" "${RESPONSE}" 1>&2
             exit 1
         fi
+        return 0
     }
 
     if [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY} -lt "$(printf "%(%s)T\\n" "-1")" ]]; then
         _get_token_and_update
     fi
+    return 0
 }
 
 ###################################################
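The expiry check that drives `_get_token_and_update` compares `ACCESS_TOKEN_EXPIRY` against the current epoch obtained with bash's built-in `printf '%(%s)T'`, so no `date` process is needed. A runnable sketch with placeholder values:

    now="$(printf "%(%s)T\\n" "-1")"         # current Unix time (bash >= 4.2)
    ACCESS_TOKEN="placeholder-token"
    ACCESS_TOKEN_EXPIRY="$((now + 3600))"    # pretend the token is valid for another hour
    if [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY} -lt ${now} ]]; then
        printf "Token missing or expired, refresh it.\n"
    else
        printf "Token valid for another %s seconds.\n" "$((ACCESS_TOKEN_EXPIRY - now))"
    fi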
@@ -872,17 +894,19 @@ _setup_root_dir() {
         fi
         ROOT_FOLDER="$(_json_value id 1 1 <<< "${json}")"
         "${1:-:}" ROOT_FOLDER "${ROOT_FOLDER}" "${CONFIG}"
+        return 0
     }
 
     _update_root_id_name() {
         ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" "${ACCESS_TOKEN}" | _json_value name)"
         "${1:-:}" ROOT_FOLDER_NAME "${ROOT_FOLDER_NAME}" "${CONFIG}"
+        return 0
     }
 
-    { [[ -n ${ROOT_FOLDER} && -z ${ROOT_FOLDER_NAME} ]] && _update_root_id_name _update_config; } || :
+    [[ -n ${ROOT_FOLDER} && -z ${ROOT_FOLDER_NAME} ]] && _update_root_id_name _update_config
 
     if [[ -n ${ROOTDIR:-} ]]; then
         ROOT_FOLDER="${ROOTDIR//[[:space:]]/}"
-        { [[ -n ${ROOT_FOLDER} ]] && _check_root_id "${UPDATE_DEFAULT_ROOTDIR}"; } || :
+        [[ -n ${ROOT_FOLDER} ]] && _check_root_id "${UPDATE_DEFAULT_ROOTDIR}"
     elif [[ -z ${ROOT_FOLDER} ]]; then
         read -r -p "Root Folder ID or URL (Default: root): " ROOT_FOLDER
         ROOT_FOLDER="${ROOT_FOLDER//[[:space:]]/}"
@@ -894,7 +918,8 @@ _setup_root_dir() {
         fi
     fi
 
-    { [[ -z ${ROOT_FOLDER_NAME} ]] && _update_root_id_name "${UPDATE_DEFAULT_ROOTDIR}"; } || :
+    [[ -z ${ROOT_FOLDER_NAME} ]] && _update_root_id_name "${UPDATE_DEFAULT_ROOTDIR}"
+    return 0
 }
 
 ###################################################
@@ -919,6 +944,7 @@ _setup_workspace() {
             _json_value message 1 1 "${WORKSPACE_FOLDER_NAME}" 1>&2 && exit 1
         }
     fi
+    return 0
 }
 
 ###################################################
@@ -943,7 +969,6 @@ _process_arguments() {
             _print_center "justify" "Given Input" ": FILE" "="
             _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
             _upload_file "${UPLOAD_METHOD:-create}" "${INPUT}" "${WORKSPACE_FOLDER_ID}" "${ACCESS_TOKEN}"
-            FILE_ID="${SKIP_DUPLICATES_FILE_ID:-${FILE_ID}}"
             [[ ${UPLOAD_STATUS} = ERROR ]] && for _ in {1..2}; do _clear_line 1; done && continue
             "${SHARE:-:}" "${FILE_ID}" "${ACCESS_TOKEN}" "${SHARE_EMAIL}"
             _print_center "justify" "DriveLink" "${SHARE:+ (SHARED)}" "-"
@@ -1070,8 +1095,9 @@ _process_arguments() {
             _gen_final_list() {
                 file="${1}"
                 __rootdir="$(_dirname "${file}")"
-                printf "%s\n" "${__rootdir}|:_//_:|$(__temp="$(grep "|:_//_:|${__rootdir}|:_//_:|" <<< "${DIRIDS}")" &&
+                printf "%s\n" "${__rootdir}|:_//_:|$(__temp="$(grep "|:_//_:|${__rootdir}|:_//_:|" <<< "${DIRIDS}" || :)" &&
                     printf "%s\n" "${__temp//"|:_//_:|"${__rootdir}*/}")|:_//_:|${file}"
+                return 0
             }
             export -f _gen_final_list _dirname && export DIRIDS
@@ -1182,6 +1208,7 @@ _process_arguments() {
             printf "\n"
         fi
     done
+    return 0
 }
 
 main() {
@@ -1203,7 +1230,7 @@ main() {
     _setup_arguments "${@}"
     _check_debug && "${SKIP_INTERNET_CHECK:-_check_internet}"
 
-    { [[ -n ${PARALLEL_UPLOAD} ]] && _setup_tempfile; } || :
+    [[ -n ${PARALLEL_UPLOAD} ]] && _setup_tempfile
 
     START="$(printf "%(%s)T\\n" "-1")"
     _print_center "justify" "Starting script" "-"
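`_setup_tempfile` now traps INT and TERM instead of EXIT, and the handler ends with an explicit `exit` so the script still terminates after cleaning up. A reduced sketch of that shape (paths are illustrative):

    TMPFILE="$(mktemp -u)"                       # unique name, file not created yet
    trap 'rm -f "${TMPFILE}"* ; exit' INT TERM   # clean up, then actually exit
    printf "scratch data\n" >| "${TMPFILE}.log"
    # ... long-running work: an interrupt now removes ${TMPFILE}* before exiting
    rm -f "${TMPFILE}"*                          # normal-path cleanup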
-e "${num}"p + grep -o "\"${1}\"\:.*" ${no_of_lines:+-m ${no_of_lines}} | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num}"p || : } ###################################################