diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cce12dd..48cb96f 100755 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,5 +43,5 @@ jobs: set -e { { command -v apt-get && sudo apt-get install snapd -yqq && sudo snap install shfmt ;} || { command -v brew && brew install shfmt ;} ;} 2>| /dev/null 1>&2 - sh merge.sh + sh release.sh { [ -n "$(git diff)" ] && printf "%s\n" "Error: Format scripts and run merge.sh before pushing the commits." && git diff && exit 1 ;} || : diff --git a/README.md b/README.md old mode 100755 new mode 100644 index 03903a3..8f6d360 --- a/README.md +++ b/README.md @@ -53,7 +53,6 @@ It utilizes google drive api v3 and google OAuth2.0 to generate access tokens an Installation and Usage documentation is available at [https://labbots.github.io/google-drive-upload/](https://labbots.github.io/google-drive-upload/) - ## Reporting Issues | Issues Status | [![GitHub issues](https://img.shields.io/github/issues/labbots/google-drive-upload.svg?label=&style=for-the-badge)](https://GitHub.com/labbots/google-drive-upload/issues/) | [![GitHub issues-closed](https://img.shields.io/github/issues-closed/labbots/google-drive-upload.svg?label=&color=success&style=for-the-badge)](https://GitHub.com/labbots/google-drive-upload/issues?q=is%3Aissue+is%3Aclosed) | diff --git a/bash/auth-utils.bash b/bash/auth-utils.bash deleted file mode 100755 index 23d2954..0000000 --- a/bash/auth-utils.bash +++ /dev/null @@ -1,502 +0,0 @@ -#!/usr/bin/env bash -# auth utils for Google Drive -# shellcheck source=/dev/null - -################################################### -# Check if account name is valid by a regex expression -# Globals: None -# Arguments: 1 -# ${1} = Account name -# Result: read description and return 1 or 0 -################################################### -_account_name_valid() { - declare name="${1:-}" account_name_regex='^([A-Za-z0-9_])+$' - [[ ${name} =~ ${account_name_regex} ]] || return 1 - return 0 -} 
- -################################################### -# Check if account exists -# First check if the given account is in correct format -# then check if client [id|token] and refresh token is present -# Globals: 2 functions -# _set_value, _account_name_valid -# Arguments: 1 -# ${1} = Account name -# Result: read description and return 1 or 0 -################################################### -_account_exists() { - declare name="${1:-}" client_id client_secret refresh_token - _account_name_valid "${name}" || return 1 - _set_value indirect client_id "ACCOUNT_${name}_CLIENT_ID" - _set_value indirect client_secret "ACCOUNT_${name}_CLIENT_SECRET" - _set_value indirect refresh_token "ACCOUNT_${name}_REFRESH_TOKEN" - [[ -z ${client_id:+${client_secret:+${refresh_token}}} ]] && return 1 - return 0 -} - -################################################### -# Show all accounts configured in config file -# Globals: 2 variables, 4 functions -# Variable - CONFIG, QUIET -# Functions - _account_exists, _set_value, _print_center, _reload_config -# Arguments: None -# Result: SHOW all accounts, export COUNT and ACC_${count}_ACC dynamic variables -# or print "No accounts configured yet." -################################################### -_all_accounts() { - { _reload_config && _handle_old_config; } || return 1 - declare all_accounts && COUNT=0 - mapfile -t all_accounts <<< "$(grep -oE '^ACCOUNT_.*_CLIENT_ID' "${CONFIG}" | sed -e "s/ACCOUNT_//g" -e "s/_CLIENT_ID//g")" - for account in "${all_accounts[@]}"; do - [[ -n ${account} ]] && _account_exists "${account}" && - { [[ ${COUNT} = 0 ]] && "${QUIET:-_print_center}" "normal" " All available accounts. " "=" || :; } && - printf "%b" "$((COUNT += 1)). ${account} \n" && _set_value direct "ACC_${COUNT}_ACC" "${account}" - done - { [[ ${COUNT} -le 0 ]] && "${QUIET:-_print_center}" "normal" " No accounts configured yet. 
" "=" 1>&2; } || printf '\n' - return 0 -} - -################################################### -# Setup a new account name -# If given account name is configured already, then ask for name -# after name has been properly setup, export ACCOUNT_NAME var -# Globals: 1 variable, 5 functions -# Variable - QUIET -# Functions - _print_center, _account_exists, _clear_line, _account_name_valid, _reload_config -# Arguments: 1 -# ${1} = Account name ( optional ) -# Result: read description and export ACCOUNT_NAME NEW_ACCOUNT_NAME -################################################### -_set_new_account_name() { - _reload_config || return 1 - declare new_account_name="${1:-}" name_valid - [[ -z ${new_account_name} ]] && { - _all_accounts 2>| /dev/null - "${QUIET:-_print_center}" "normal" " New account name: " "=" - "${QUIET:-_print_center}" "normal" "Info: Account names can only contain alphabets / numbers / dashes." " " && printf '\n' - } - until [[ -n ${name_valid} ]]; do - if [[ -n ${new_account_name} ]]; then - if _account_name_valid "${new_account_name}"; then - if _account_exists "${new_account_name}"; then - "${QUIET:-_print_center}" "normal" " Warning: Given account ( ${new_account_name} ) already exists, input different name. " "-" 1>&2 - unset new_account_name && continue - else - export NEW_ACCOUNT_NAME="${new_account_name}" ACCOUNT_NAME="${new_account_name}" && name_valid="true" && continue - fi - else - "${QUIET:-_print_center}" "normal" " Warning: Given account name ( ${new_account_name} ) invalid, input different name. " "-" 1>&2 - unset new_account_name && continue - fi - else - [[ -t 1 ]] || { "${QUIET:-_print_center}" "normal" " Error: Not running in an interactive terminal, cannot ask for new account name. 
" 1>&2 && return 1; } - printf -- "-> \033[?7l" - read -r new_account_name - printf '\033[?7h' - fi - _clear_line 1 - done - "${QUIET:-_print_center}" "normal" " Given account name: ${NEW_ACCOUNT_NAME} " "=" - export ACCOUNT_NAME="${NEW_ACCOUNT_NAME}" - return 0 -} - -################################################### -# Delete a account from config file -# Globals: 2 variables, 3 functions -# Variables - CONFIG, QUIET -# Functions - _account_exists, _print_center, _reload_config -# Arguments: None -# Result: check if account exists and delete from config, else print error message -################################################### -_delete_account() { - { _reload_config && _handle_old_config; } || return 1 - declare account="${1:?Error: give account name}" regex config_without_values - if _account_exists "${account}"; then - regex="^ACCOUNT_${account}_(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)|DEFAULT_ACCOUNT=\"${account}\"" - config_without_values="$(grep -vE "${regex}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit - printf "%s\n" "${config_without_values}" >| "${CONFIG}" || return 1 - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms - "${QUIET:-_print_center}" "normal" " Successfully deleted account ( ${account} ) from config. " "-" - _reload_config # reload config if successfully deleted - else - "${QUIET:-_print_center}" "normal" " Error: Cannot delete account ( ${account} ) from config. No such account exists. 
" "-" 1>&2 - fi - return 0 -} - -################################################### -# handle legacy config -# this will be triggered only if old config values are present, convert to new format -# new account will be created with "default" name, if default already taken, then add a number as suffix -# Globals: 7 variables, 2 functions -# Variables - CLIENT_ID CLIENT_SECRET, REFRESH_TOKEN, ROOT_FOLDER, ROOT_FOLDER_NAME CONFIG, ACCOUNT_NAME -# Functions - _account_exists, _reload_config -# Arguments: None -################################################### -_handle_old_config() { - export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN # to handle a shellcheck warning - # only try to convert the if all three values are present - [[ -n ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && { - declare account_name="default" regex config_without_values count=0 - # first try to name the new account as default, otherwise try to add numbers as suffix - until ! _account_exists "${account_name}"; do - account_name="${account_name}$((count += 1))" - done - # form a regex expression to remove values from config, _update_config isn't used here to prevent a loop and multiple grep calls - regex="^(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)" - config_without_values="$(grep -vE "${regex}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit - printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ - "ACCOUNT_${account_name}_CLIENT_ID=\"${CLIENT_ID}\"" \ - "ACCOUNT_${account_name}_CLIENT_SECRET=\"${CLIENT_SECRET}\"" \ - "ACCOUNT_${account_name}_REFRESH_TOKEN=\"${REFRESH_TOKEN}\"" \ - "ACCOUNT_${account_name}_ROOT_FOLDER=\"${ROOT_FOLDER}\"" \ - "ACCOUNT_${account_name}_ROOT_FOLDER_NAME=\"${ROOT_FOLDER_NAME}\"" \ - "${config_without_values}" >| "${CONFIG}" || return 1 - - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms - - _reload_config || return 1 # reload config file - } - return 0 -} - 
-################################################### -# handle old config values, new account creation, custom account name, updating default config and account -# start token service if applicable -# Globals: 12 variables, 7 functions -# Variables - DEFAULT_CONFIG, NEW_ACCOUNT_NAME, CUSTOM_ACCOUNT_NAME, DELETE_ACCOUNT_NAME, LIST_ACCOUNTS, QUIET -# UPDATE_DEFAULT_ACCOUNT, UPDATE_DEFAULT_CONFIG, CONFIG_INFO, CONTINUE_WITH_NO_INPUT -# Functions - _reload_config, _handle_old_config, _set_new_account_name, _account_exists, _all_accounts -# _check_account_credentials, _token_bg_service, _print_center, _update_config, _set_value -# Arguments: None -# Result: read description and start access token check in bg if required -################################################### -_check_credentials() { - { _reload_config && _handle_old_config; } || return 1 - # set account name to default account name - ACCOUNT_NAME="${DEFAULT_ACCOUNT}" - - if [[ -n ${NEW_ACCOUNT_NAME} ]]; then - # create new account, --create-account flag - _set_new_account_name "${NEW_ACCOUNT_NAME}" || return 1 - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - else - # use custom account, --account flag - if [[ -n ${CUSTOM_ACCOUNT_NAME} ]]; then - if _account_exists "${CUSTOM_ACCOUNT_NAME}"; then - ACCOUNT_NAME="${CUSTOM_ACCOUNT_NAME}" - else - # error out in case CUSTOM_ACCOUNT_NAME is invalid - "${QUIET:-_print_center}" "normal" " Error: No such account ( ${CUSTOM_ACCOUNT_NAME} ) exists. 
" "-" && return 1 - fi - elif [[ -n ${DEFAULT_ACCOUNT} ]]; then - # check if default account if valid or not, else set account name to nothing and remove default account in config - _account_exists "${DEFAULT_ACCOUNT}" || { - _update_config DEFAULT_ACCOUNT "" "${CONFIG}" && unset DEFAULT_ACCOUNT ACCOUNT_NAME && UPDATE_DEFAULT_ACCOUNT="_update_config" - } - # UPDATE_DEFAULT_ACCOUNT to true so that default config is updated later - else - UPDATE_DEFAULT_ACCOUNT="_update_config" # as default account doesn't exist - fi - - # in case no account name is set at this point of script - if [[ -z ${ACCOUNT_NAME} ]]; then - # if accounts are configured but default account is not set - # COUNT comes from _all_accounts function - if _all_accounts 2>| /dev/null && [[ ${COUNT} -gt 0 ]]; then - # set ACCOUNT_NAME without asking if only one account available - if [[ ${COUNT} -eq 1 ]]; then - _set_value indirect ACCOUNT_NAME "ACC_1_ACC" # ACC_1_ACC comes from _all_accounts function - else - "${QUIET:-_print_center}" "normal" " Above accounts are configured, but default one not set. " "=" - if [[ -t 1 ]]; then - "${QUIET:-_print_center}" "normal" " Choose default account: " "-" - until [[ -n ${ACCOUNT_NAME} ]]; do - printf -- "-> \033[?7l" - read -r account_name - printf '\033[?7h' - if [[ ${account_name} -gt 0 && ${account_name} -le ${COUNT} ]]; then - _set_value indirect ACCOUNT_NAME "ACC_${COUNT}_ACC" - else - _clear_line 1 - fi - done - else - # if not running in a terminal then choose 1st one as default - printf "%s\n" "Warning: Script is not running in a terminal, choosing first account as default." 
- _set_value indirect ACCOUNT_NAME "ACC_1_ACC" # ACC_1_ACC comes from _all_accounts function - fi - fi - else - _set_new_account_name "" || return 1 - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - fi - fi - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - fi - - "${UPDATE_DEFAULT_ACCOUNT:-:}" DEFAULT_ACCOUNT "${ACCOUNT_NAME}" "${CONFIG}" # update default account if required - "${UPDATE_DEFAULT_CONFIG:-:}" CONFIG "${CONFIG}" "${CONFIG_INFO}" # update default config if required - - # only launch the token service if there was some input - [[ -n ${CONTINUE_WITH_NO_INPUT} ]] || _token_bg_service # launch token bg service - return 0 -} - -################################################### -# check credentials for a given account name -# Globals: 3 functions -# Functions - _check_client, _check_refresh_token, _check_access_token -# Arguments: 2 -# ${1} = Account name ( optional ) -# Result: read description, return 1 or 0 -################################################### -_check_account_credentials() { - declare account_name="${1:-}" - { - _check_client ID "${account_name}" && - _check_client SECRET "${account_name}" && - _check_refresh_token "${account_name}" && - _check_access_token "${account_name}" check - } || return 1 - return 0 -} - -################################################### -# Check client id or secret and ask if required -# Globals: 4 variables, 3 functions -# Variables - CONFIG, QUIET, CLIENT_ID_${ACCOUNT_NAME}, CLIENT_SECRET_${ACCOUNT_NAME} -# Functions - _print_center, _update_config, _set_value -# Arguments: 2 -# ${1} = ID or SECRET -# ${2} = Account name ( optional - if not given, then just CLIENT_[ID|SECRET] var is used ) -# Result: read description and export ACCOUNT_name_CLIENT_[ID|SECRET] CLIENT_[ID|SECRET] -################################################### -_check_client() { - declare type="CLIENT_${1:?Error: ID or SECRET}" account_name="${2:-}" \ - type_name type_value type_regex valid client message - export 
client_id_regex='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' client_secret_regex='[0-9A-Za-z_-]+' - type_name="${account_name:+ACCOUNT_${account_name}_}${type}" - - # set the type_value to the actual value of ACCOUNT_${account_name}_[ID|SECRET] - _set_value indirect type_value "${type_name}" - # set the type_regex to the actual value of client_id_regex or client_secret_regex - _set_value indirect type_regex "${type}_regex" - - until [[ -n ${type_value} && -n ${valid} ]]; do - [[ -n ${type_value} ]] && { - if [[ ${type_value} =~ ${type_regex} ]]; then - [[ -n ${client} ]] && { _update_config "${type_name}" "${type_value}" "${CONFIG}" || return 1; } - valid="true" && continue - else - { [[ -n ${client} ]] && message="- Try again"; } || message="in config ( ${CONFIG} )" - "${QUIET:-_print_center}" "normal" " Invalid Client ${1} ${message} " "-" && unset "${type_name}" client - fi - } - [[ -z ${client} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter Client ${1} " "-" - [[ -n ${client} ]] && _clear_line 1 - printf -- "-> " - read -r "${type_name?}" && client=1 - _set_value indirect type_value "${type_name}" - done - - # export ACCOUNT_name_CLIENT_[ID|SECRET] - _set_value direct "${type_name}" "${type_value}" - # export CLIENT_[ID|SECRET] - _set_value direct "${type}" "${type_value}" - - return 0 -} - -################################################### -# Check refresh token and ask if required -# Globals: 8 variables, 4 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REDIRECT_URI, TOKEN_URL, CONFIG, QUIET -# Functions - _set_value, _print_center, _update_config, _check_access_token -# Arguments: 1 -# ${1} = Account name ( optional - if not given, then just REFRESH_TOKEN var is used ) -# Result: read description & export REFRESH_TOKEN ACCOUNT_${account_name}_REFRESH_TOKEN -################################################### -_check_refresh_token() { - # bail out before doing anything if client id and secret is not present, unlikely to 
happen but just in case - [[ -z ${CLIENT_ID:+${CLIENT_SECRET}} ]] && return 1 - declare account_name="${1:-}" \ - refresh_token_regex='[0-9]//[0-9A-Za-z_-]+' authorization_code_regex='[0-9]/[0-9A-Za-z_-]+' - declare refresh_token_name="${account_name:+ACCOUNT_${account_name}_}REFRESH_TOKEN" check_error - - _set_value indirect refresh_token_value "${refresh_token_name}" - - [[ -n ${refresh_token_value} ]] && { - ! [[ ${refresh_token_value} =~ ${refresh_token_regex} ]] && - "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. " "-" && unset refresh_token_value - } - - [[ -z ${refresh_token_value} ]] && { - printf "\n" && "${QUIET:-_print_center}" "normal" "If you have a refresh token generated, then type the token, else leave blank and press return key.." " " - printf "\n" && "${QUIET:-_print_center}" "normal" " Refresh Token " "-" && printf -- "-> " - read -r refresh_token_value - if [[ -n ${refresh_token_value} ]]; then - "${QUIET:-_print_center}" "normal" " Checking refresh token.. " "-" - if [[ ${refresh_token_value} =~ ${refresh_token_regex} ]]; then - _set_value direct REFRESH_TOKEN "${refresh_token_value}" - { _check_access_token "${account_name}" skip_check && - _update_config "${refresh_token_name}" "${refresh_token_value}" "${CONFIG}" && - _clear_line 1; } || check_error=true - else - check_error=true - fi - [[ -n ${check_error} ]] && "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token given, follow below steps to generate.. " "-" && unset refresh_token_value - else - "${QUIET:-_print_center}" "normal" " No Refresh token given, follow below steps to generate.. 
" "-" && unset refresh_token_value - fi - - [[ -z ${refresh_token_value} ]] && { - printf "\n" && "${QUIET:-_print_center}" "normal" "Visit the below URL, tap on allow and then enter the code obtained" " " - URL="https://accounts.google.com/o/oauth2/auth?client_id=${CLIENT_ID}&redirect_uri=${REDIRECT_URI}&scope=${SCOPE}&response_type=code&prompt=consent" - printf "\n%s\n" "${URL}" - declare AUTHORIZATION_CODE authorization_code AUTHORIZATION_CODE_VALID response - until [[ -n ${AUTHORIZATION_CODE} && -n ${AUTHORIZATION_CODE_VALID} ]]; do - [[ -n ${AUTHORIZATION_CODE} ]] && { - if [[ ${AUTHORIZATION_CODE} =~ ${authorization_code_regex} ]]; then - AUTHORIZATION_CODE_VALID="true" && continue - else - "${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. " "-" && unset AUTHORIZATION_CODE authorization_code - fi - } - { [[ -z ${authorization_code} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter the authorization code " "-"; } || _clear_line 1 - printf -- "-> \033[?7l" - read -r AUTHORIZATION_CODE && authorization_code=1 - printf '\033[?7h' - done - response="$(curl --compressed "${CURL_PROGRESS}" -X POST \ - --data "code=${AUTHORIZATION_CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")" || : - _clear_line 1 1>&2 - - refresh_token_value="$(_json_value refresh_token 1 1 <<< "${response}")" || - { printf "%s\n" "Error: Cannot fetch refresh token, make sure the authorization code was correct." 
&& return 1; } - - _set_value direct REFRESH_TOKEN "${refresh_token_value}" - { _check_access_token "${account_name}" skip_check "${response}" && - _update_config "${refresh_token_name}" "${refresh_token_value}" "${CONFIG}"; } || return 1 - } - printf "\n" - } - - # export ACCOUNT_name_REFRESH_TOKEN - _set_value direct "${refresh_token_name}" "${refresh_token_value}" - # export REFRESH_TOKEN - _set_value direct REFRESH_TOKEN "${refresh_token_value}" - - return 0 -} - -################################################### -# Check access token and create/update if required -# Also update in config -# Globals: 9 variables, 3 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, TOKEN_URL, CONFIG, API_URL, API_VERSION, QUIET -# Functions - _print_center, _update_config, _set_value -# Arguments: 2 -# ${1} = Account name ( optional - if not given, then just ACCESS_TOKEN var is used ) -# ${2} = if skip_check, then force create access token, else check with regex and expiry -# ${3} = json response ( optional ) -# Result: read description & export ACCESS_TOKEN ACCESS_TOKEN_EXPIRY -################################################### -_check_access_token() { - # bail out before doing anything if client id|secret or refresh token is not present, unlikely to happen but just in case - [[ -z ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && return 1 - - declare account_name="${1:-}" no_check="${2:-false}" response_json="${3:-}" \ - token_name token_expiry_name token_value token_expiry_value response \ - access_token_regex='ya29\.[0-9A-Za-z_-]+' - declare token_name="${account_name:+ACCOUNT_${account_name}_}ACCESS_TOKEN" - declare token_expiry_name="${token_name}_EXPIRY" - - _set_value indirect token_value "${token_name}" - _set_value indirect token_expiry_value "${token_expiry_name}" - - [[ ${no_check} = skip_check || -z ${token_value} || ${token_expiry_value:-0} -lt "$(printf "%(%s)T\\n" "-1")" || ! 
${token_value} =~ ${access_token_regex} ]] && { - response="${response_json:-$(curl --compressed -s -X POST --data \ - "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || : - - if token_value="$(_json_value access_token 1 1 <<< "${response}")"; then - token_expiry_value="$(($(printf "%(%s)T\\n" "-1") + $(_json_value expires_in 1 1 <<< "${response}") - 1))" - _update_config "${token_name}" "${token_value}" "${CONFIG}" || return 1 - _update_config "${token_expiry_name}" "${token_expiry_value}" "${CONFIG}" || return 1 - else - "${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2 - printf "%s\n" "${response}" 1>&2 - return 1 - fi - } - - # export ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY - _set_value direct ACCESS_TOKEN "${token_value}" - _set_value direct ACCESS_TOKEN_EXPIRY "${token_expiry_value}" - - # export INITIAL_ACCESS_TOKEN which is used on script cleanup - _set_value direct INITIAL_ACCESS_TOKEN "${ACCESS_TOKEN}" - return 0 -} - -################################################### -# load config file if available, else create a empty file -# uses global variable CONFIG -################################################### -_reload_config() { - { [[ -r ${CONFIG} ]] && . 
"${CONFIG}"; } || { printf "" >> "${CONFIG}" || return 1; } - return 0 -} - -################################################### -# launch a background service to check access token and update it -# checks ACCESS_TOKEN_EXPIRY, try to update before 5 mins of expiry, a fresh token gets 60 mins -# process will be killed when script exits or "${MAIN_PID}" is killed -# Globals: 4 variables, 1 function -# Variables - ACCESS_TOKEN, ACCESS_TOKEN_EXPIRY, MAIN_PID, TMPFILE -# Functions - _check_access_token -# Arguments: None -# Result: read description & export ACCESS_TOKEN_SERVICE_PID -################################################### -_token_bg_service() { - [[ -z ${MAIN_PID} ]] && return 0 # don't start if MAIN_PID is empty - printf "%b\n" "ACCESS_TOKEN=\"${ACCESS_TOKEN}\"\nACCESS_TOKEN_EXPIRY=\"${ACCESS_TOKEN_EXPIRY}\"" >| "${TMPFILE}_ACCESS_TOKEN" - { - until ! kill -0 "${MAIN_PID}" 2>| /dev/null 1>&2; do - . "${TMPFILE}_ACCESS_TOKEN" - CURRENT_TIME="$(printf "%(%s)T\\n" "-1")" - REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))" - if [[ ${REMAINING_TOKEN_TIME} -le 300 ]]; then - # timeout after 30 seconds, it shouldn't take too long anyway, and update tmp config - CONFIG="${TMPFILE}_ACCESS_TOKEN" _timeout 30 _check_access_token "" skip_check || : - else - TOKEN_PROCESS_TIME_TO_SLEEP="$(if [[ ${REMAINING_TOKEN_TIME} -le 301 ]]; then - printf "0\n" - else - printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))" - fi)" - sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}" - fi - sleep 1 - done - } & - export ACCESS_TOKEN_SERVICE_PID="${!}" - return 0 -} - -export -f _account_name_valid \ - _account_exists \ - _all_accounts \ - _set_new_account_name \ - _delete_account \ - _handle_old_config \ - _check_credentials \ - _check_account_credentials \ - _check_client \ - _check_refresh_token \ - _check_access_token \ - _reload_config diff --git a/bash/common-utils.bash b/bash/common-utils.bash deleted file mode 100755 index 33855cb..0000000 --- a/bash/common-utils.bash +++ /dev/null 
@@ -1,423 +0,0 @@ -#!/usr/bin/env bash -# Functions that will used in core script - -################################################### -# Convert bytes to human readable form -# Globals: None -# Required Arguments: 1 -# ${1} = Positive integer ( bytes ) -# Result: Print human readable form. -# Reference: -# https://unix.stackexchange.com/a/259254 -################################################### -_bytes_to_human() { - declare b=${1:-0} d='' s=0 S=(Bytes {K,M,G,T,P,E,Y,Z}B) - while ((b > 1024)); do - d="$(printf ".%02d" $((b % 1024 * 100 / 1024)))" - b=$((b / 1024)) && ((s++)) - done - printf "%s\n" "${b}${d} ${S[${s}]}" -} - -################################################### -# Check for bash version >= 4.x -# Globals: 1 Variable -# BASH_VERSINFO -# Required Arguments: None -# Result: If -# SUCEESS: Status 0 -# ERROR: print message and exit 1 -################################################### -_check_bash_version() { - { ! [[ ${BASH_VERSINFO:-0} -ge 4 ]] && printf "Bash version lower than 4.x not supported.\n" && exit 1; } || : -} - -################################################### -# Check if debug is enabled and enable command trace -# Globals: 2 variables, 1 function -# Varibles - DEBUG, QUIET -# Function - _is_terminal -# Arguments: None -# Result: If DEBUG -# Present - Enable command trace and change print functions to avoid spamming. -# Absent - Disable command trace -# Check QUIET, then check terminal size and enable print functions accordingly. -################################################### -_check_debug() { - if [[ -n ${DEBUG} ]]; then - set -x && PS4='-> ' - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "${2}"; } || { printf "%s%s\n" "${2}" "${3}"; }; } - _clear_line() { :; } && _newline() { :; } - else - if [[ -z ${QUIET} ]]; then - if _support_ansi_escapes; then - # This refreshes the interactive shell so we can use the ${COLUMNS} variable in the _print_center function. 
- shopt -s checkwinsize && (: && :) - if [[ ${COLUMNS} -lt 45 ]]; then - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - else - trap 'shopt -s checkwinsize; (:;:)' SIGWINCH - fi - export CURL_PROGRESS="-#" EXTRA_LOG="_print_center" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" - else - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - _clear_line() { :; } - fi - _newline() { printf "%b" "${1}"; } - else - _print_center() { :; } && _clear_line() { :; } && _newline() { :; } - fi - set +x - fi - export -f _print_center _clear_line _newline -} - -################################################### -# Check internet connection. -# Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs. -# Globals: 3 functions -# _print_center, _clear_line, _timeout -# Arguments: None -# Result: On -# Success - Nothing -# Error - print message and exit 1 -################################################### -_check_internet() { - "${EXTRA_LOG}" "justify" "Checking Internet Connection.." "-" - if ! _timeout 10 curl -Is google.com; then - _clear_line 1 - "${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" - return 1 - fi - _clear_line 1 -} - -################################################### -# Move cursor to nth no. of line and clear it to the begining. 
-# Globals: None -# Arguments: 1 -# ${1} = Positive integer ( line number ) -# Result: Read description -################################################### -_clear_line() { - printf "\033[%sA\033[2K" "${1}" -} - -################################################### -# Alternative to wc -l command -# Globals: None -# Arguments: 1 or pipe -# ${1} = file, _count < file -# variable, _count <<< variable -# pipe = echo something | _count -# Result: Read description -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#get-the-number-of-lines-in-a-file -################################################### -_count() { - mapfile -tn 0 lines - printf '%s\n' "${#lines[@]}" -} - -################################################### -# Alternative to dirname command -# Globals: None -# Arguments: 1 -# ${1} = path of file or folder -# Result: read description -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#get-the-directory-name-of-a-file-path -################################################### -_dirname() { - declare tmp=${1:-.} - - [[ ${tmp} != *[!/]* ]] && { printf '/\n' && return; } - tmp="${tmp%%"${tmp##*[!/]}"}" - - [[ ${tmp} != */* ]] && { printf '.\n' && return; } - tmp=${tmp%/*} && tmp="${tmp%%"${tmp##*[!/]}"}" - - printf '%s\n' "${tmp:-/}" -} - -################################################### -# Convert given time in seconds to readable form -# 110 to 1 minute(s) and 50 seconds -# Globals: None -# Arguments: 1 -# ${1} = Positive Integer ( time in seconds ) -# Result: read description -# Reference: -# https://stackoverflow.com/a/32164707 -################################################### -_display_time() { - declare T="${1}" - declare DAY="$((T / 60 / 60 / 24))" HR="$((T / 60 / 60 % 24))" MIN="$((T / 60 % 60))" SEC="$((T % 60))" - [[ ${DAY} -gt 0 ]] && printf '%d days ' "${DAY}" - [[ ${HR} -gt 0 ]] && printf '%d hrs ' "${HR}" - [[ ${MIN} -gt 0 ]] && printf '%d minute(s) ' "${MIN}" - [[ ${DAY} -gt 0 || ${HR} -gt 0 || ${MIN} -gt 0 ]] && printf 
'and ' - printf '%d seconds\n' "${SEC}" -} - -################################################### -# Fetch latest commit sha of release or branch -# Do not use github rest api because rate limit error occurs -# Globals: None -# Arguments: 3 -# ${1} = "branch" or "release" -# ${2} = branch name or release name -# ${3} = repo name e.g labbots/google-drive-upload -# Result: print fetched sha -################################################### -_get_latest_sha() { - declare LATEST_SHA - case "${1:-${TYPE}}" in - branch) - LATEST_SHA="$( - : "$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000)" - : "$(printf "%s\n" "${_}" | grep -o "Commit\\/.*<" -m1 || :)" && : "${_##*\/}" && printf "%s\n" "${_%%<*}" - )" - ;; - release) - LATEST_SHA="$( - : "$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}")" - : "$(printf "%s\n" "${_}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && : "${_##*commit\/}" && printf "%s\n" "${_%%\"*}" - )" - ;; - esac - printf "%b" "${LATEST_SHA:+${LATEST_SHA}\n}" -} - -################################################### -# Encode the given string to parse properly as json -# Globals: None -# Arguments: 2 -# ${1} = json or something else -# ${2} = input -# Result: if ${1} is j, then escape all chars, else only special chars -# Reference: -# https://tools.ietf.org/html/rfc7159#section-7 -################################################### -_json_escape() { - declare mode="${1:?}" input="${2:?Provide Input}" - [[ ${mode} = "j" ]] && { - # \ and / - : "${input//\\/\\\\}" - : "${_//\//\\\/}" - # : "${_//\'/\\\'}" # ' (not strictly needed ?) 
- input="${_//\"/\\\"}" # " - } - : "${input//$'\t'/\\\t}" # \t (tab) - : "${_//$'\n'/\\\n}" # \n (newline) - : "${_//$'\r'/\\\r}" # \r (carriage return) - : "${_//$'\f'/\\\f}" # \f (form feed) - : "${_//$'\b'/\\\b}" # \b (backspace) - printf "%s" "${_}" -} - -################################################### -# Method to extract specified field data from json -# Globals: None -# Arguments: 2 -# ${1} - value of field to fetch from json -# ${2} - Optional, no of lines to parse for the given field in 1st arg -# ${3} - Optional, nth number of value from extracted values, default it 1. -# Input: file | here string | pipe -# _json_value "Arguments" < file -# _json_value "Arguments" <<< "${varibale}" -# echo something | _json_value "Arguments" -# Result: print extracted value -################################################### -_json_value() { - declare num _tmp no_of_lines - { [[ ${2} -gt 0 ]] && no_of_lines="${2}"; } || : - { [[ ${3} -gt 0 ]] && num="${3}"; } || { [[ ${3} != all ]] && num=1; } - # shellcheck disable=SC2086 - _tmp="$(grep -o "\"${1}\"\:.*" ${no_of_lines:+-m} ${no_of_lines})" || return 1 - printf "%s\n" "${_tmp}" | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num}"p || : -} - -################################################### -# Print a text to center interactively and fill the rest of the line with text specified. -# This function is fine-tuned to this script functionality, so may appear unusual. 
-# Globals: 1 variable -# COLUMNS -# Arguments: 4 -# If ${1} = normal -# ${2} = text to print -# ${3} = symbol -# If ${1} = justify -# If remaining arguments = 2 -# ${2} = text to print -# ${3} = symbol -# If remaining arguments = 3 -# ${2}, ${3} = text to print -# ${4} = symbol -# Result: read description -# Reference: -# https://gist.github.com/TrinityCoder/911059c83e5f7a351b785921cf7ecda -################################################### -_print_center() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare -i TERM_COLS="${COLUMNS}" - declare type="${1}" filler - case "${type}" in - normal) declare out="${2}" && symbol="${3}" ;; - justify) - if [[ $# = 3 ]]; then - declare input1="${2}" symbol="${3}" TO_PRINT out - TO_PRINT="$((TERM_COLS - 5))" - { [[ ${#input1} -gt ${TO_PRINT} ]] && out="[ ${input1:0:TO_PRINT}..]"; } || { out="[ ${input1} ]"; } - else - declare input1="${2}" input2="${3}" symbol="${4}" TO_PRINT temp out - TO_PRINT="$((TERM_COLS * 47 / 100))" - { [[ ${#input1} -gt ${TO_PRINT} ]] && temp+=" ${input1:0:TO_PRINT}.."; } || { temp+=" ${input1}"; } - TO_PRINT="$((TERM_COLS * 46 / 100))" - { [[ ${#input2} -gt ${TO_PRINT} ]] && temp+="${input2:0:TO_PRINT}.. 
"; } || { temp+="${input2} "; } - out="[${temp}]" - fi - ;; - *) return 1 ;; - esac - - declare -i str_len=${#out} - [[ $str_len -ge $((TERM_COLS - 1)) ]] && { - printf "%s\n" "${out}" && return 0 - } - - declare -i filler_len="$(((TERM_COLS - str_len) / 2))" - [[ $# -ge 2 ]] && ch="${symbol:0:1}" || ch=" " - for ((i = 0; i < filler_len; i++)); do - filler="${filler}${ch}" - done - - printf "%s%s%s" "${filler}" "${out}" "${filler}" - [[ $(((TERM_COLS - str_len) % 2)) -ne 0 ]] && printf "%s" "${ch}" - printf "\n" - - return 0 -} - -################################################### -# Quiet version of _print_center -################################################### -_print_center_quiet() { - { [[ $# = 3 ]] && printf "%s\n" "${2}"; } || printf "%s%s\n" "${2}" "${3}" -} - -################################################### -# Evaluates value1=value2 -# Globals: None -# Arguments: 3 -# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) -# ${2} = var name -# ${3} = var value -# Result: export value1=value2 -################################################### -_set_value() { - case "${1:?}" in - d | direct) export "${2:?}=${3}" ;; - i | indirect) export "${2:?}=${!3}" ;; - esac -} - -################################################### -# Check if script terminal supports ansi escapes -# Globals: 1 variable -# TERM -# Arguments: None -# Result: return 1 or 0 -################################################### -_support_ansi_escapes() { - { [[ -t 2 && -n ${TERM} && ${TERM} =~ (xterm|rxvt|urxvt|linux|vt|screen) ]] && return 0; } || return 1 -} - -################################################### -# Alternative to timeout command -# Globals: None -# Arguments: 1 and rest -# ${1} = amount of time to sleep -# rest = command to execute -# Result: Read description -# Reference: -# https://stackoverflow.com/a/24416732 -################################################### -_timeout() { - declare timeout="${1:?Error: Specify Timeout}" && shift - { - "${@}" & - 
child="${!}" - trap -- "" TERM - { - sleep "${timeout}" - kill -9 "${child}" - } & - wait "${child}" - } 2>| /dev/null 1>&2 -} - -################################################### -# Config updater -# Incase of old value, update, for new value add. -# Globals: None -# Arguments: 3 -# ${1} = value name -# ${2} = value -# ${3} = config path -# Result: read description -################################################### -_update_config() { - [[ $# -lt 3 ]] && printf "Missing arguments\n" && return 1 - declare value_name="${1}" value="${2}" config_path="${3}" - ! [ -f "${config_path}" ] && : >| "${config_path}" # If config file doesn't exist. - chmod u+w "${config_path}" || return 1 - printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name}=" "${config_path}" || :)" \ - "${value_name}=\"${value}\"" >| "${config_path}" || return 1 - chmod a-w-r-x,u+r "${config_path}" || return 1 - return 0 -} - -################################################### -# Encode the given string to parse properly in network requests -# Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#percent-encode-a-string -################################################### -_url_encode() { - declare LC_ALL=C - for ((i = 0; i < ${#1}; i++)); do - : "${1:i:1}" - case "${_}" in - [a-zA-Z0-9.~_-]) - printf '%s' "${_}" - ;; - *) - printf '%%%02X' "'${_}" - ;; - esac - done 2>| /dev/null - printf '\n' -} - -export -f _bytes_to_human \ - _check_bash_version \ - _check_debug \ - _check_internet \ - _clear_line \ - _count \ - _dirname \ - _display_time \ - _get_latest_sha \ - _json_escape \ - _json_value \ - _print_center \ - _print_center_quiet \ - _set_value \ - _support_ansi_escapes \ - _timeout \ - _update_config \ - _url_encode diff --git a/bash/drive-utils.bash b/bash/drive-utils.bash deleted file mode 100755 index d0b05a5..0000000 --- a/bash/drive-utils.bash +++ /dev/null @@ -1,454 +0,0 @@ -#!/usr/bin/env bash - 
-################################################### -# Search for an existing file on gdrive with write permission. -# Globals: 3 variables, 2 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value -# Arguments: 4 -# ${1} = file name -# ${2} = root dir id of file -# ${3} = mode ( size or md5Checksum or empty ) -# ${4} = if mode = empty, then not required -# mode = size, then size -# mode = md5Checksum, then md5sum -# Result: print search response if id fetched -# check size and md5sum if mode size or md5Checksum -# Reference: -# https://developers.google.com/drive/api/v3/search-files -################################################### -_check_existing_file() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare name="${1}" rootdir="${2}" mode="${3}" param_value="${4}" query search_response id - - "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2 - query="$(_url_encode "name=\"${name}\" and '${rootdir}' in parents and trashed=false")" - - search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id,name,mimeType${mode:+,${mode}})&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - _json_value id 1 1 <<< "${search_response}" 2>| /dev/null 1>&2 || return 1 - - [[ -n ${mode} ]] && { - [[ "$(_json_value "${mode}" 1 1 <<< "${search_response}")" = "${param_value}" ]] || return 1 - } - - printf "%s\n" "${search_response}" - return 0 -} - -################################################### -# Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 6 functions -# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _json_escape _bytes_to_human, _clear_line -# Arguments: 5 -# ${1} = update or 
upload ( upload type ) -# ${2} = file id to upload -# ${3} = root dir id for file -# ${4} = name of file -# ${5} = size of file -# ${6} = md5sum of file -# Result: On -# Success - Upload/Update file and export FILE_ID -# Error - return 1 -# Reference: -# https://developers.google.com/drive/api/v2/reference/files/copy -################################################### -_clone_file() { - [[ $# -lt 5 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare job="${1}" file_id="${2}" file_root_id="${3}" name="${4}" size="${5}" md5="${6}" - declare clone_file_post_data clone_file_response readable_size _file_id description escaped_name && STRING="Cloned" - escaped_name="$(_json_escape j "${name}")" print_name="$(_json_escape p "${name}")" readable_size="$(_bytes_to_human "${size}")" - - # create description data - [[ -n ${DESCRIPTION_FILE} ]] && { - : "${DESCRIPTION_FILE//%f/${name}}" && : "${_//%s/${readable_size}}" - description="$(_json_escape j "${_}")" # escape for json - } - - clone_file_post_data="{\"parents\": [\"${file_root_id}\"]${description:+,\"description\":\"${description}\"}}" - - _print_center "justify" "${print_name} " "| ${readable_size}" "=" - - if [[ ${job} = update ]]; then - declare file_check_json check_value_type check_value - case "${CHECK_MODE}" in - 2) check_value_type="size" check_value="${size}" ;; - 3) check_value_type="md5Checksum" check_value="${md5}" ;; - esac - # Check if file actually exists. - if file_check_json="$(_check_existing_file "${escaped_name}" "${file_root_id}")"; then - if [[ -n ${SKIP_DUPLICATES} ]]; then - _collect_file_info "${file_check_json}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_name}" " already exists." "=" && return 0 - else - _print_center "justify" "Overwriting file.." 
"-" - { _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" && - clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare")"; } || - { _error_logging_upload "${print_name}" "${post_data:-${file_check_json}}" || return 1; } - if [[ ${_file_id} != "${file_id}" ]]; then - _api_request -s \ - -X DELETE \ - "${API_URL}/drive/${API_VERSION}/files/${_file_id}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || : - STRING="Updated" - else - _collect_file_info "${file_check_json}" || return 1 - fi - fi - else - "${EXTRA_LOG}" "justify" "Cloning file.." "-" - fi - else - "${EXTRA_LOG}" "justify" "Cloning file.." "-" - fi - - # shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. - clone_file_response="$(_api_request ${CURL_PROGRESS} \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${clone_file_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" - for _ in 1 2 3; do _clear_line 1; done - _collect_file_info "${clone_file_response}" || return 1 - "${QUIET:-_print_center}" "justify" "${print_name} " "| ${readable_size} | ${STRING}" "=" - return 0 -} - -################################################### -# Create/Check directory in google drive. 
-# Globals: 3 variables, 3 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _json_escape -# Arguments: 2 -# ${1} = dir name -# ${2} = root dir id of given dir -# Result: print folder id -# Reference: -# https://developers.google.com/drive/api/v3/folder -################################################### -_create_directory() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare dirname="${1##*/}" escaped_dirname rootdir="${2}" query search_response folder_id - escaped_dirname="$(_json_escape j "${dirname}")" print_dirname="$(_json_escape p "${dirname}")" - - "${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${print_dirname}" "-" 1>&2 - query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname}\" and trashed=false and '${rootdir}' in parents")" - - search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - - if ! 
folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)"; then - declare create_folder_post_data create_folder_response - create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname}\",\"parents\": [\"${rootdir}\"]}" - create_folder_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${create_folder_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - fi - _clear_line 1 1>&2 - - { folder_id="${folder_id:-$(_json_value id 1 1 <<< "${create_folder_response}")}" && printf "%s\n" "${folder_id}"; } || - { printf "%s\n" "${create_folder_response}" 1>&2 && return 1; } - return 0 -} - -################################################### -# Get information for a gdrive folder/file. -# Globals: 3 variables, 1 function -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _json_value -# Arguments: 2 -# ${1} = folder/file gdrive id -# ${2} = information to fetch, e.g name, id -# Result: On -# Success - print fetched value -# Error - print "message" field from the json -# Reference: -# https://developers.google.com/drive/api/v3/search-files -################################################### -_drive_info() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare folder_id="${1}" fetch="${2}" search_response - - "${EXTRA_LOG}" "justify" "Fetching info.." "-" 1>&2 - search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=${fetch}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - printf "%b" "${search_response:+${search_response}\n}" - return 0 -} - -################################################### -# Extract ID from a googledrive folder/file url. 
-# Globals: None -# Arguments: 1 -# ${1} = googledrive folder/file url. -# Result: print extracted ID -################################################### -_extract_id() { - [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare LC_ALL=C ID="${1}" - case "${ID}" in - *'drive.google.com'*'id='*) ID="${ID##*id=}" && ID="${ID%%\?*}" && ID="${ID%%\&*}" ;; - *'drive.google.com'*'file/d/'* | 'http'*'docs.google.com'*'/d/'*) ID="${ID##*\/d\/}" && ID="${ID%%\/*}" && ID="${ID%%\?*}" && ID="${ID%%\&*}" ;; - *'drive.google.com'*'drive'*'folders'*) ID="${ID##*\/folders\/}" && ID="${ID%%\?*}" && ID="${ID%%\&*}" ;; - esac - printf "%b" "${ID:+${ID}\n}" -} - -################################################### -# Upload ( Create/Update ) files on gdrive. -# Interrupted uploads can be resumed. -# Globals: 8 variables, 11 functions -# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _json_escape _print_center, _bytes_to_human, _check_existing_file -# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session -# _full_upload, _collect_file_info -# Arguments: 3 -# ${1} = update or upload ( upload type ) -# ${2} = file to upload -# ${3} = root dir id for file -# Result: On -# Success - Upload/Update file and export FILE_ID -# Error - return 1 -# Reference: -# https://developers.google.com/drive/api/v3/create-file -# https://developers.google.com/drive/api/v3/manage-uploads -# https://developers.google.com/drive/api/v3/reference/files/update -################################################### -_upload_file() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare job="${1}" input="${2}" folder_id="${3}" \ - slug escaped_slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type description \ - resume_args1 
resume_args2 resume_args3 - - slug="${input##*/}" escaped_slug="$(_json_escape j "${slug}")" print_slug="$(_json_escape p "${slug}")" - inputname="${slug%.*}" - extension="${slug##*.}" - inputsize="$(($(wc -c < "${input}")))" && content_length="${inputsize}" - readable_size="$(_bytes_to_human "${inputsize}")" - - # Handle extension-less files - [[ ${inputname} = "${extension}" ]] && declare mime_type && { - mime_type="$(file --brief --mime-type "${input}" || mimetype --output-format %m "${input}")" 2>| /dev/null || { - "${QUIET:-_print_center}" "justify" "Error: file or mimetype command not found." "=" && printf "\n" - exit 1 - } - } - - # create description data - [[ -n ${DESCRIPTION_FILE} ]] && { - : "${DESCRIPTION_FILE//%f/${slug}}" && : "${_//%s/${inputsize}}" && : "${_//%m/${mime_type}}" - description="$(_json_escape j "${_}")" # escape for json - } - - _print_center "justify" "${print_slug}" " | ${readable_size}" "=" - - # Set proper variables for overwriting files - [[ ${job} = update ]] && { - declare file_check_json check_value - case "${CHECK_MODE}" in - 2) check_value_type="size" check_value="${inputsize}" ;; - 3) - check_value_type="md5Checksum" - check_value="$(md5sum "${input}")" || { - "${QUIET:-_print_center}" "justify" "Error: cannot calculate md5sum of given file." "=" 1>&2 - return 1 - } - check_value="${check_value%% *}" - ;; - esac - # Check if file actually exists, and create if not. - if file_check_json="$(_check_existing_file "${escaped_slug}" "${folder_id}" "${check_value_type}" "${check_value}")"; then - if [[ -n ${SKIP_DUPLICATES} ]]; then - # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json}" "${escaped_slug}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_slug}" " already exists." 
"=" && return 0 - else - request_method="PATCH" - _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" || - { _error_logging_upload "${print_slug}" "${file_check_json}" || return 1; } - url="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" - # JSON post data to specify the file name and folder under while the file to be updated - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"addParents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" - STRING="Updated" - fi - else - job="create" - fi - } - - # Set proper variables for creating files - [[ ${job} = create ]] && { - url="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" - request_method="POST" - # JSON post data to specify the file name and folder under while the file to be created - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"parents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" - STRING="Uploaded" - } - - __file="${HOME}/.google-drive-upload/${print_slug}__::__${folder_id}__::__${inputsize}" - # https://developers.google.com/drive/api/v3/manage-uploads - if [[ -r "${__file}" ]]; then - uploadlink="$(< "${__file}")" - http_code="$(curl --compressed -s -X PUT "${uploadlink}" -o /dev/null --write-out %"{http_code}")" || : - case "${http_code}" in - 308) # Active Resumable URI give 308 status - uploaded_range="$(: "$(curl --compressed -s -X PUT \ - -H "Content-Range: bytes */${inputsize}" \ - --url "${uploadlink}" --globoff -D - || :)" && - : "$(printf "%s\n" "${_/*[R,r]ange: bytes=0-/}")" && read -r firstline <<< "$_" && printf "%s\n" "${firstline//$'\r'/}")" - if [[ ${uploaded_range} -gt 0 ]]; then - _print_center "justify" "Resuming interrupted upload.." 
"-" && _newline "\n" - content_range="$(printf "bytes %s-%s/%s\n" "$((uploaded_range + 1))" "$((inputsize - 1))" "${inputsize}")" - content_length="$((inputsize - $((uploaded_range + 1))))" - # Resuming interrupted uploads needs http1.1 - resume_args1='-s' resume_args2='--http1.1' resume_args3="Content-Range: ${content_range}" - _upload_file_from_uri _clear_line - _collect_file_info "${upload_body}" "${print_slug}" || return 1 - _normal_logging_upload - _remove_upload_session - else - _full_upload || return 1 - fi - ;; - 201 | 200) # Completed Resumable URI give 20* status - upload_body="${http_code}" - _collect_file_info "${upload_body}" "${print_slug}" || return 1 - _normal_logging_upload - _remove_upload_session - ;; - 4[0-9][0-9] | 000 | *) # Dead Resumable URI give 40* status - _full_upload || return 1 - ;; - esac - else - _full_upload || return 1 - fi - return 0 -} - -################################################### -# Sub functions for _upload_file function - Start -# generate resumable upload link -_generate_upload_link() { - "${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2 - uploadlink="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X "${request_method}" \ - -H "Content-Type: application/json; charset=UTF-8" \ - -H "X-Upload-Content-Type: ${mime_type}" \ - -H "X-Upload-Content-Length: ${inputsize}" \ - -d "$postdata" \ - "${url}" \ - -D - || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - case "${uploadlink}" in - *'ocation: '*'upload_id'*) uploadlink="$(read -r firstline <<< "${uploadlink/*[L,l]ocation: /}" && printf "%s\n" "${firstline//$'\r'/}")" && return 0 ;; - '' | *) return 1 ;; - esac - - return 0 -} - -# Curl command to push the file to google drive. -_upload_file_from_uri() { - _print_center "justify" "Uploading.." "-" - # shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. 
- upload_body="$(_api_request ${CURL_PROGRESS} \ - -X PUT \ - -H "Content-Type: ${mime_type}" \ - -H "Content-Length: ${content_length}" \ - -H "Slug: ${print_slug}" \ - -T "${input}" \ - -o- \ - --url "${uploadlink}" \ - --globoff \ - ${CURL_SPEED} ${resume_args1} ${resume_args2} \ - -H "${resume_args3}" || :)" - [[ -z ${VERBOSE_PROGRESS} ]] && for _ in 1 2; do _clear_line 1; done && "${1:-:}" - return 0 -} - -# logging in case of successful upload -_normal_logging_upload() { - [[ -z ${VERBOSE_PROGRESS} ]] && _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_slug} " "| ${readable_size} | ${STRING}" "=" - return 0 -} - -# Tempfile Used for resuming interrupted uploads -_log_upload_session() { - [[ ${inputsize} -gt 1000000 ]] && printf "%s\n" "${uploadlink}" >| "${__file}" - return 0 -} - -# remove upload session -_remove_upload_session() { - rm -f "${__file}" - return 0 -} - -# wrapper to fully upload a file from scratch -_full_upload() { - _generate_upload_link || { _error_logging_upload "${print_slug}" "${uploadlink}" || return 1; } - _log_upload_session - _upload_file_from_uri - _collect_file_info "${upload_body}" "${print_slug}" || return 1 - _normal_logging_upload - _remove_upload_session - return 0 -} -# Sub functions for _upload_file function - End -################################################### - -################################################### -# Share a gdrive file/folder -# Globals: 3 variables, 4 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _print_center, _clear_line -# Arguments: 2 -# ${1} = gdrive ID of folder/file -# ${2} = Email to which file will be shared ( optional ) -# Result: read description -# Reference: -# https://developers.google.com/drive/api/v3/manage-sharing -################################################### -_share_id() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare id="${1}" role="${2:?Missing role}" 
share_email="${3}" - declare type="${share_email:+user}" share_post_data share_post_data share_response - - "${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2 - share_post_data="{\"role\":\"${role}\",\"type\":\"${type:-anyone}\"${share_email:+,\"emailAddress\":\"${share_email}\"}}" - - share_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${share_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files/${id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - { _json_value id 1 1 <<< "${share_response}" 2>| /dev/null 1>&2 && return 0; } || - { printf "%s\n" "Error: Cannot Share." 1>&2 && printf "%s\n" "${share_response}" 1>&2 && return 1; } -} - -export -f _check_existing_file \ - _clone_file \ - _create_directory \ - _drive_info \ - _extract_id \ - _upload_file \ - _generate_upload_link \ - _upload_file_from_uri \ - _normal_logging_upload \ - _log_upload_session \ - _remove_upload_session \ - _full_upload \ - _share_id diff --git a/bash/google-oauth2.bash b/bash/google-oauth2.bash deleted file mode 100755 index ac16c7b..0000000 --- a/bash/google-oauth2.bash +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env bash -# shellcheck source=/dev/null - -set -o errexit -o noclobber -o pipefail - -_usage() { - printf "%s\n" " -A simple curl OAuth2 authenticator for google drive. Utilizing api v3. - -Usage: - - ./${0##*/} create - authenticates a new user with a fresh client id and secret. - - ./${0##*/} add - authenticates a new user but will use the client id and secret if available. If not, then same as create flag. - - ./${0##*/} refresh - gets a new access token. Make sure CLIENT_SECRET, CLIENT_ID and REFRESH_TOKEN is exported as an environment variable or CONFIG - - ./${0##*/} help - show this help. - -Make sure to export CONFIG as an environment variable if you want to use save new changes or want to use values from it. 
It should be in the format required by gupload. - -Variable names - CLIENT_SECRET, CLIENT_ID and REFRESH_TOKEN - -You can also export CLIENT_SECRET, CLIENT_ID and REFRESH_TOKEN as an environment variable if you don't want to use above method." - exit 0 -} - -UTILS_FOLDER="${UTILS_FOLDER:-$(pwd)}" -{ . "${UTILS_FOLDER}"/common-utils.bash && . "${UTILS_FOLDER}"/auth-utils.bash; } || { printf "Error: Unable to source util files.\n" && exit 1; } - -[[ $# = 0 ]] && _usage - -_check_debug - -_cleanup() { - # unhide the cursor if hidden - [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\033[?25h\033[?7h" - { - # grab all script children pids - script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)" - - # kill all grabbed children processes - # shellcheck disable=SC2086 - kill ${script_children_pids} 1>| /dev/null - - export abnormal_exit && if [[ -n ${abnormal_exit} ]]; then - printf "\n\n%s\n" "Script exited manually." - kill -- -$$ & - fi - } 2>| /dev/null || : - return 0 -} - -trap 'abnormal_exit="1"; exit' INT TERM -trap '_cleanup' EXIT -trap '' TSTP # ignore ctrl + z - -export MAIN_PID="$$" - -export API_URL="https://www.googleapis.com" -export API_VERSION="v3" \ - SCOPE="${API_URL}/auth/drive" \ - REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ - TOKEN_URL="https://accounts.google.com/o/oauth2/token" - -# the credential functions require a config file to update, so just provide /dev/null if CONFIG variable is not exported -export CONFIG="${CONFIG:-"/dev/null"}" -_reload_config || return 1 - -case "${1}" in - help) _usage ;; - create) unset CLIENT_SECRET CLIENT_ID REFRESH_TOKEN ACCESS_TOKEN && CREATE_ACCOUNT="true" ;; - add) unset REFRESH_TOKEN ACCESS_TOKEN && CREATE_ACCOUNT="true" ;; - refresh) - unset ACCESS_TOKEN - [[ -z ${CLIENT_ID} ]] && printf "%s\n" "Missing CLIENT_ID variable, make sure to export to use refresh option." && _usage - [[ -z ${CLIENT_SECRET} ]] && printf "%s\n" "Missing CLIENT_SECRET variable, make sure to export to use refresh option." 
&& _usage - [[ -z ${REFRESH_TOKEN} ]] && printf "%s\n" "Missing REFRESH_TOKEN variable, make sure to export to use refresh option." && _usage - ;; -esac - -_check_account_credentials || exit 1 -[[ -n ${CREATE_ACCOUNT} ]] && printf "Refresh Token: %s\n\n" "${REFRESH_TOKEN}" 1>&2 -printf "Access Token: %s\n" "${ACCESS_TOKEN}" 1>&2 -exit 0 diff --git a/bash/release/gsync b/bash/release/gsync deleted file mode 100755 index 9b00f0d..0000000 --- a/bash/release/gsync +++ /dev/null @@ -1,1186 +0,0 @@ -#!/usr/bin/env bash -SELF_SOURCE="true" -# Functions that will used in core script - -################################################### -# Convert bytes to human readable form -# Globals: None -# Required Arguments: 1 -# ${1} = Positive integer ( bytes ) -# Result: Print human readable form. -# Reference: -# https://unix.stackexchange.com/a/259254 -################################################### -_bytes_to_human() { - declare b=${1:-0} d='' s=0 S=(Bytes {K,M,G,T,P,E,Y,Z}B) - while ((b > 1024)); do - d="$(printf ".%02d" $((b % 1024 * 100 / 1024)))" - b=$((b / 1024)) && ((s++)) - done - printf "%s\n" "${b}${d} ${S[${s}]}" -} - -################################################### -# Check for bash version >= 4.x -# Globals: 1 Variable -# BASH_VERSINFO -# Required Arguments: None -# Result: If -# SUCEESS: Status 0 -# ERROR: print message and exit 1 -################################################### -_check_bash_version() { - { ! [[ ${BASH_VERSINFO:-0} -ge 4 ]] && printf "Bash version lower than 4.x not supported.\n" && exit 1; } || : -} - -################################################### -# Check if debug is enabled and enable command trace -# Globals: 2 variables, 1 function -# Varibles - DEBUG, QUIET -# Function - _is_terminal -# Arguments: None -# Result: If DEBUG -# Present - Enable command trace and change print functions to avoid spamming. -# Absent - Disable command trace -# Check QUIET, then check terminal size and enable print functions accordingly. 
-################################################### -_check_debug() { - if [[ -n ${DEBUG} ]]; then - set -x && PS4='-> ' - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "${2}"; } || { printf "%s%s\n" "${2}" "${3}"; }; } - _clear_line() { :; } && _newline() { :; } - else - if [[ -z ${QUIET} ]]; then - if _support_ansi_escapes; then - # This refreshes the interactive shell so we can use the ${COLUMNS} variable in the _print_center function. - shopt -s checkwinsize && (: && :) - if [[ ${COLUMNS} -lt 45 ]]; then - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - else - trap 'shopt -s checkwinsize; (:;:)' SIGWINCH - fi - export CURL_PROGRESS="-#" EXTRA_LOG="_print_center" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" - else - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - _clear_line() { :; } - fi - _newline() { printf "%b" "${1}"; } - else - _print_center() { :; } && _clear_line() { :; } && _newline() { :; } - fi - set +x - fi - export -f _print_center _clear_line _newline -} - -################################################### -# Check internet connection. -# Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs. -# Globals: 3 functions -# _print_center, _clear_line, _timeout -# Arguments: None -# Result: On -# Success - Nothing -# Error - print message and exit 1 -################################################### -_check_internet() { - "${EXTRA_LOG}" "justify" "Checking Internet Connection.." "-" - if ! _timeout 10 curl -Is google.com; then - _clear_line 1 - "${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" - return 1 - fi - _clear_line 1 -} - -################################################### -# Move cursor to nth no. of line and clear it to the begining. 
-# Globals: None -# Arguments: 1 -# ${1} = Positive integer ( line number ) -# Result: Read description -################################################### -_clear_line() { - printf "\033[%sA\033[2K" "${1}" -} - -################################################### -# Alternative to wc -l command -# Globals: None -# Arguments: 1 or pipe -# ${1} = file, _count < file -# variable, _count <<< variable -# pipe = echo something | _count -# Result: Read description -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#get-the-number-of-lines-in-a-file -################################################### -_count() { - mapfile -tn 0 lines - printf '%s\n' "${#lines[@]}" -} - -################################################### -# Alternative to dirname command -# Globals: None -# Arguments: 1 -# ${1} = path of file or folder -# Result: read description -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#get-the-directory-name-of-a-file-path -################################################### -_dirname() { - declare tmp=${1:-.} - - [[ ${tmp} != *[!/]* ]] && { printf '/\n' && return; } - tmp="${tmp%%"${tmp##*[!/]}"}" - - [[ ${tmp} != */* ]] && { printf '.\n' && return; } - tmp=${tmp%/*} && tmp="${tmp%%"${tmp##*[!/]}"}" - - printf '%s\n' "${tmp:-/}" -} - -################################################### -# Convert given time in seconds to readable form -# 110 to 1 minute(s) and 50 seconds -# Globals: None -# Arguments: 1 -# ${1} = Positive Integer ( time in seconds ) -# Result: read description -# Reference: -# https://stackoverflow.com/a/32164707 -################################################### -_display_time() { - declare T="${1}" - declare DAY="$((T / 60 / 60 / 24))" HR="$((T / 60 / 60 % 24))" MIN="$((T / 60 % 60))" SEC="$((T % 60))" - [[ ${DAY} -gt 0 ]] && printf '%d days ' "${DAY}" - [[ ${HR} -gt 0 ]] && printf '%d hrs ' "${HR}" - [[ ${MIN} -gt 0 ]] && printf '%d minute(s) ' "${MIN}" - [[ ${DAY} -gt 0 || ${HR} -gt 0 || ${MIN} -gt 0 ]] && printf 
'and ' - printf '%d seconds\n' "${SEC}" -} - -################################################### -# Fetch latest commit sha of release or branch -# Do not use github rest api because rate limit error occurs -# Globals: None -# Arguments: 3 -# ${1} = "branch" or "release" -# ${2} = branch name or release name -# ${3} = repo name e.g labbots/google-drive-upload -# Result: print fetched sha -################################################### -_get_latest_sha() { - declare LATEST_SHA - case "${1:-${TYPE}}" in - branch) - LATEST_SHA="$( - : "$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000)" - : "$(printf "%s\n" "${_}" | grep -o "Commit\\/.*<" -m1 || :)" && : "${_##*\/}" && printf "%s\n" "${_%%<*}" - )" - ;; - release) - LATEST_SHA="$( - : "$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}")" - : "$(printf "%s\n" "${_}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && : "${_##*commit\/}" && printf "%s\n" "${_%%\"*}" - )" - ;; - esac - printf "%b" "${LATEST_SHA:+${LATEST_SHA}\n}" -} - -################################################### -# Encode the given string to parse properly as json -# Globals: None -# Arguments: 2 -# ${1} = json or something else -# ${2} = input -# Result: if ${1} is j, then escape all chars, else only special chars -# Reference: -# https://tools.ietf.org/html/rfc7159#section-7 -################################################### -_json_escape() { - declare mode="${1:?}" input="${2:?Provide Input}" - [[ ${mode} = "j" ]] && { - # \ and / - : "${input//\\/\\\\}" - : "${_//\//\\\/}" - # : "${_//\'/\\\'}" # ' (not strictly needed ?) 
- input="${_//\"/\\\"}" # " - } - : "${input//$'\t'/\\\t}" # \t (tab) - : "${_//$'\n'/\\\n}" # \n (newline) - : "${_//$'\r'/\\\r}" # \r (carriage return) - : "${_//$'\f'/\\\f}" # \f (form feed) - : "${_//$'\b'/\\\b}" # \b (backspace) - printf "%s" "${_}" -} - -################################################### -# Method to extract specified field data from json -# Globals: None -# Arguments: 2 -# ${1} - value of field to fetch from json -# ${2} - Optional, no of lines to parse for the given field in 1st arg -# ${3} - Optional, nth number of value from extracted values, default it 1. -# Input: file | here string | pipe -# _json_value "Arguments" < file -# _json_value "Arguments" <<< "${varibale}" -# echo something | _json_value "Arguments" -# Result: print extracted value -################################################### -_json_value() { - declare num _tmp no_of_lines - { [[ ${2} -gt 0 ]] && no_of_lines="${2}"; } || : - { [[ ${3} -gt 0 ]] && num="${3}"; } || { [[ ${3} != all ]] && num=1; } - # shellcheck disable=SC2086 - _tmp="$(grep -o "\"${1}\"\:.*" ${no_of_lines:+-m} ${no_of_lines})" || return 1 - printf "%s\n" "${_tmp}" | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num}"p || : -} - -################################################### -# Print a text to center interactively and fill the rest of the line with text specified. -# This function is fine-tuned to this script functionality, so may appear unusual. 
-# Globals: 1 variable -# COLUMNS -# Arguments: 4 -# If ${1} = normal -# ${2} = text to print -# ${3} = symbol -# If ${1} = justify -# If remaining arguments = 2 -# ${2} = text to print -# ${3} = symbol -# If remaining arguments = 3 -# ${2}, ${3} = text to print -# ${4} = symbol -# Result: read description -# Reference: -# https://gist.github.com/TrinityCoder/911059c83e5f7a351b785921cf7ecda -################################################### -_print_center() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare -i TERM_COLS="${COLUMNS}" - declare type="${1}" filler - case "${type}" in - normal) declare out="${2}" && symbol="${3}" ;; - justify) - if [[ $# = 3 ]]; then - declare input1="${2}" symbol="${3}" TO_PRINT out - TO_PRINT="$((TERM_COLS - 5))" - { [[ ${#input1} -gt ${TO_PRINT} ]] && out="[ ${input1:0:TO_PRINT}..]"; } || { out="[ ${input1} ]"; } - else - declare input1="${2}" input2="${3}" symbol="${4}" TO_PRINT temp out - TO_PRINT="$((TERM_COLS * 47 / 100))" - { [[ ${#input1} -gt ${TO_PRINT} ]] && temp+=" ${input1:0:TO_PRINT}.."; } || { temp+=" ${input1}"; } - TO_PRINT="$((TERM_COLS * 46 / 100))" - { [[ ${#input2} -gt ${TO_PRINT} ]] && temp+="${input2:0:TO_PRINT}.. 
"; } || { temp+="${input2} "; } - out="[${temp}]" - fi - ;; - *) return 1 ;; - esac - - declare -i str_len=${#out} - [[ $str_len -ge $((TERM_COLS - 1)) ]] && { - printf "%s\n" "${out}" && return 0 - } - - declare -i filler_len="$(((TERM_COLS - str_len) / 2))" - [[ $# -ge 2 ]] && ch="${symbol:0:1}" || ch=" " - for ((i = 0; i < filler_len; i++)); do - filler="${filler}${ch}" - done - - printf "%s%s%s" "${filler}" "${out}" "${filler}" - [[ $(((TERM_COLS - str_len) % 2)) -ne 0 ]] && printf "%s" "${ch}" - printf "\n" - - return 0 -} - -################################################### -# Quiet version of _print_center -################################################### -_print_center_quiet() { - { [[ $# = 3 ]] && printf "%s\n" "${2}"; } || printf "%s%s\n" "${2}" "${3}" -} - -################################################### -# Evaluates value1=value2 -# Globals: None -# Arguments: 3 -# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) -# ${2} = var name -# ${3} = var value -# Result: export value1=value2 -################################################### -_set_value() { - case "${1:?}" in - d | direct) export "${2:?}=${3}" ;; - i | indirect) export "${2:?}=${!3}" ;; - esac -} - -################################################### -# Check if script terminal supports ansi escapes -# Globals: 1 variable -# TERM -# Arguments: None -# Result: return 1 or 0 -################################################### -_support_ansi_escapes() { - { [[ -t 2 && -n ${TERM} && ${TERM} =~ (xterm|rxvt|urxvt|linux|vt|screen) ]] && return 0; } || return 1 -} - -################################################### -# Alternative to timeout command -# Globals: None -# Arguments: 1 and rest -# ${1} = amount of time to sleep -# rest = command to execute -# Result: Read description -# Reference: -# https://stackoverflow.com/a/24416732 -################################################### -_timeout() { - declare timeout="${1:?Error: Specify Timeout}" && shift - { - "${@}" & - 
child="${!}" - trap -- "" TERM - { - sleep "${timeout}" - kill -9 "${child}" - } & - wait "${child}" - } 2>| /dev/null 1>&2 -} - -################################################### -# Config updater -# Incase of old value, update, for new value add. -# Globals: None -# Arguments: 3 -# ${1} = value name -# ${2} = value -# ${3} = config path -# Result: read description -################################################### -_update_config() { - [[ $# -lt 3 ]] && printf "Missing arguments\n" && return 1 - declare value_name="${1}" value="${2}" config_path="${3}" - ! [ -f "${config_path}" ] && : >| "${config_path}" # If config file doesn't exist. - chmod u+w "${config_path}" || return 1 - printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name}=" "${config_path}" || :)" \ - "${value_name}=\"${value}\"" >| "${config_path}" || return 1 - chmod a-w-r-x,u+r "${config_path}" || return 1 - return 0 -} - -################################################### -# Encode the given string to parse properly in network requests -# Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#percent-encode-a-string -################################################### -_url_encode() { - declare LC_ALL=C - for ((i = 0; i < ${#1}; i++)); do - : "${1:i:1}" - case "${_}" in - [a-zA-Z0-9.~_-]) - printf '%s' "${_}" - ;; - *) - printf '%%%02X' "'${_}" - ;; - esac - done 2>| /dev/null - printf '\n' -} - -export -f _bytes_to_human \ - _check_bash_version \ - _check_debug \ - _check_internet \ - _clear_line \ - _count \ - _dirname \ - _display_time \ - _get_latest_sha \ - _json_escape \ - _json_value \ - _print_center \ - _print_center_quiet \ - _set_value \ - _support_ansi_escapes \ - _timeout \ - _update_config \ - _url_encode -# Sync a FOLDER to google drive forever using labbots/google-drive-upload -# shellcheck source=/dev/null - -_usage() { - printf "%b" " -The script can be used to sync your local folder to 
google drive. - -Utilizes google-drive-upload bash scripts.\n -Usage: ${0##*/} [options.. ]\n -Options:\n - -d | --directory - Gdrive foldername.\n - -k | --kill - to kill the background job using pid number ( -p flags ) or used with input, can be used multiple times.\n - -j | --jobs - See all background jobs that were started and still running.\n - Use --jobs v/verbose to more information for jobs.\n - -p | --pid - Specify a pid number, used for --jobs or --kill or --info flags, can be used multiple times.\n - -i | --info - See information about a specific sync using pid_number ( use -p flag ) or use with input, can be used multiple times.\n - -t | --time - Amount of time to wait before try to sync again in background.\n - To set wait time by default, use ${0##*/} -t default='3'. Replace 3 with any positive integer.\n - -l | --logs - To show the logs after starting a job or show log of existing job. Can be used with pid number ( -p flag ). - Note: If multiple pid numbers or inputs are used, then will only show log of first input as it goes on forever. - -a | --arguments - Additional arguments for gupload commands. e.g: ${0##*/} -a '-q -o -p 4 -d'.\n - To set some arguments by default, use ${0##*/} -a default='-q -o -p 4 -d'.\n - -fg | --foreground - This will run the job in foreground and show the logs.\n - -in | --include 'pattern' - Only include the files with the given pattern to upload.\n - e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n - -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading.\n - e.g: ${0##*/} local_folder --exclude "*1*", will exclude all files with pattern '1' in the name.\n - -c | --command 'command name'- Incase if gupload command installed with any other name or to use in systemd service.\n - --sync-detail-dir 'dirname' - Directory where a job information will be stored. 
- Default: ${HOME}/.google-drive-upload\n - -s | --service 'service name' - To generate systemd service file to setup background jobs on boot.\n - -D | --debug - Display script command trace, use before all the flags to see maximum script trace.\n - -h | --help - Display usage instructions.\n" - exit 0 -} - -_short_help() { - printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit 0 -} - -################################################### -# Check if a pid exists by using ps -# Globals: None -# Arguments: 1 -# ${1} = pid number of a sync job -# Result: return 0 or 1 -################################################### -_check_pid() { - { ps -p "${1}" 2>| /dev/null 1>&2 && return 0; } || return 1 -} - -################################################### -# Show information about a specific sync job -# Globals: 1 variable, 2 functions -# Variable - SYNC_LIST -# Functions - _check_pid, _setup_loop_variables -# Arguments: 1 -# ${1} = pid number of a sync job -# ${2} = anything: Prints extra information ( optional ) -# ${3} = all information about a job ( optional ) -# Result: show job info and set RETURN_STATUS -################################################### -_get_job_info() { - declare input local_folder pid times extra - pid="${1}" && input="${3:-$(grep "${pid}" "${SYNC_LIST}" || :)}" - - if [[ -n ${input} ]]; then - if times="$(ps -p "${pid}" -o etimes --no-headers)"; then - printf "\n%s\n" "PID: ${pid}" - : "${input#*"|:_//_:|"}" && local_folder="${_%%"|:_//_:|"*}" - - printf "Local Folder: %s\n" "${local_folder}" - printf "Drive Folder: %s\n" "${input##*"|:_//_:|"}" - printf "Running Since: %s\n" "$(_display_time "${times}")" - - [[ -n ${2} ]] && { - extra="$(ps -p "${pid}" -o %cpu,%mem --no-headers || :)" - printf "CPU usage:%s\n" "${extra% *}" - printf "Memory usage: %s\n" "${extra##* }" - _setup_loop_variables "${local_folder}" "${input##*"|:_//_:|"}" - printf "Success: %s\n" "$(_count < "${SUCCESS_LOG}")" - printf "Failed: %s\n" 
"$(_count < "${ERROR_LOG}")" - } - RETURN_STATUS=0 - else - RETURN_STATUS=1 - fi - else - RETURN_STATUS=11 - fi - return 0 -} - -################################################### -# Remove a sync job information from database -# Globals: 2 variables -# SYNC_LIST, SYNC_DETAIL_DIR -# Arguments: 1 -# ${1} = pid number of a sync job -# Result: read description -################################################### -_remove_job() { - declare pid="${1}" input local_folder drive_folder new_list - input="$(grep "${pid}" "${SYNC_LIST}" || :)" - - if [ -n "${pid}" ]; then - : "${input##*"|:_//_:|"}" && local_folder="${_%%"|:_//_:|"*}" - drive_folder="${input##*"|:_//_:|"}" - new_list="$(grep -v "${pid}" "${SYNC_LIST}" || :)" - printf "%s\n" "${new_list}" >| "${SYNC_LIST}" - fi - - rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-${2}}${local_folder_remove_job:-${3}}" - # Cleanup dir if empty - { [[ -z $(find "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-${2}}" -type f || :) ]] && rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-${2}}"; } 2>| /dev/null 1>&2 - return 0 -} - -################################################### -# Kill a sync job and do _remove_job -# Globals: 1 function -# _remove_job -# Arguments: 1 -# ${1} = pid number of a sync job -# Result: read description -################################################### -_kill_job() { - declare pid="${1}" - kill -9 "${pid}" 2>| /dev/null 1>&2 || : - _remove_job "${pid}" - printf "Killed.\n" -} - -################################################### -# Show total no of sync jobs running -# Globals: 1 variable, 2 functions -# Variable - SYNC_LIST -# Functions - _get_job_info, _remove_job -# Arguments: 1 -# ${1} = v/verbose: Prints extra information ( optional ) -# Result: read description -################################################### -_show_jobs() { - declare list pid total=0 - list="$(grep -v '^$' "${SYNC_LIST}" || :)" - printf "%s\n" "${list}" >| "${SYNC_LIST}" - - while read -r -u 4 line; do - 
if [[ -n ${line} ]]; then - : "${line%%"|:_//_:|"*}" && pid="${_##*: }" - _get_job_info "${pid}" "${1}" "${line}" - { [[ ${RETURN_STATUS} = 1 ]] && _remove_job "${pid}"; } || { ((total += 1)) && no_task="printf"; } - fi - done 4< "${SYNC_LIST}" - - printf "\nTotal Jobs Running: %s\n" "${total}" - [[ -z ${1} ]] && "${no_task:-:}" "For more info: %s -j/--jobs v/verbose\n" "${0##*/}" - return 0 -} - -################################################### -# Setup required variables for a sync job -# Globals: 1 Variable -# SYNC_DETAIL_DIR -# Arguments: 1 -# ${1} = Local folder name which will be synced -# Result: read description -################################################### -_setup_loop_variables() { - declare folder="${1}" drive_folder="${2}" - DIRECTORY="${SYNC_DETAIL_DIR}/${drive_folder}${folder}" - PID_FILE="${DIRECTORY}/pid" - SUCCESS_LOG="${DIRECTORY}/success_list" - ERROR_LOG="${DIRECTORY}/failed_list" - LOGS="${DIRECTORY}/logs" -} - -################################################### -# Create folder and files for a sync job -# Globals: 4 variables -# DIRECTORY, PID_FILE, SUCCESS_LOG, ERROR_LOG -# Arguments: None -# Result: read description -################################################### -_setup_loop_files() { - mkdir -p "${DIRECTORY}" - for file in PID_FILE SUCCESS_LOG ERROR_LOG; do - printf "" >> "${!file}" - done - PID="$(< "${PID_FILE}")" -} - -################################################### -# Check for new files in the sync folder and upload it -# A list is generated everytime, success and error. 
-# Globals: 4 variables -# SUCCESS_LOG, ERROR_LOG, COMMAND_NAME, ARGS, GDRIVE_FOLDER -# Arguments: None -# Result: read description -################################################### -_check_and_upload() { - declare all initial new_files new_file - - mapfile -t initial < "${SUCCESS_LOG}" - mapfile -t all <<< "$(printf "%s\n%s\n" "$(< "${SUCCESS_LOG}")" "$(< "${ERROR_LOG}")")" - - # check if folder is empty - [[ $(printf "%b\n" ./*) = "./*" ]] && return 0 - - all+=(*) - # shellcheck disable=SC2086 - { [ -n "${INCLUDE_FILES}" ] && mapfile -t all <<< "$(printf "%s\n" "${all[@]}" | grep -E ${INCLUDE_FILES})"; } || : - # shellcheck disable=SC2086 - mapfile -t new_files <<< "$(eval grep -vxEf <(printf "%s\n" "${initial[@]}") <(printf "%s\n" "${all[@]}") ${EXCLUDE_FILES} || :)" - - [[ -n ${new_files[*]} ]] && printf "" >| "${ERROR_LOG}" && { - declare -A Aseen && for new_file in "${new_files[@]}"; do - { [[ ${Aseen[new_file]} ]] && continue; } || Aseen[${new_file}]=x - if eval "\"${COMMAND_PATH}\"" "\"${new_file}\"" "${ARGS}"; then - printf "%s\n" "${new_file}" >> "${SUCCESS_LOG}" - else - printf "%s\n" "${new_file}" >> "${ERROR_LOG}" - printf "%s\n" "Error: Input - ${new_file}" - fi - printf "\n" - done - } - return 0 -} - -################################################### -# Loop _check_and_upload function, sleep for sometime in between -# Globals: 1 variable, 1 function -# Variable - SYNC_TIME_TO_SLEEP -# Function - _check_and_upload -# Arguments: None -# Result: read description -################################################### -_loop() { - while :; do - _check_and_upload - sleep "${SYNC_TIME_TO_SLEEP}" - done -} - -################################################### -# Check if a loop exists with given input -# Globals: 3 variables, 3 function -# Variable - FOLDER, PID, GDRIVE_FOLDER -# Function - _setup_loop_variables, _setup_loop_files, _check_pid -# Arguments: None -# Result: return 0 - No existing loop, 1 - loop exists, 2 - loop only in database -# if 
return 2 - then remove entry from database -################################################### -_check_existing_loop() { - _setup_loop_variables "${FOLDER}" "${GDRIVE_FOLDER}" - _setup_loop_files - if [[ -z ${PID} ]]; then - RETURN_STATUS=0 - elif _check_pid "${PID}"; then - RETURN_STATUS=1 - else - _remove_job "${PID}" - _setup_loop_variables "${FOLDER}" "${GDRIVE_FOLDER}" - _setup_loop_files - RETURN_STATUS=2 - fi - return 0 -} - -################################################### -# Start a new sync job by _loop function -# Print sync job information -# Globals: 7 variables, 1 function -# Variable - LOGS, PID_FILE, INPUT, GDRIVE_FOLDER, FOLDER, SYNC_LIST, FOREGROUND -# Function - _loop -# Arguments: None -# Result: read description -# Show logs at last and don't hangup if SHOW_LOGS is set -################################################### -_start_new_loop() { - if [[ -n ${FOREGROUND} ]]; then - printf "%b\n" "Local Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}\n" - trap '_clear_line 1 && printf "\n" && _remove_job "" "${GDRIVE_FOLDER}" "${FOLDER}"; exit' INT TERM - trap 'printf "Job stopped.\n" ; exit' EXIT - _loop - else - (_loop &> "${LOGS}") & # A double fork doesn't get killed if script exits - PID="${!}" - printf "%s\n" "${PID}" >| "${PID_FILE}" - printf "%b\n" "Job started.\nLocal Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}" - printf "%s\n" "PID: ${PID}" - printf "%b\n" "PID: ${PID}|:_//_:|${FOLDER}|:_//_:|${GDRIVE_FOLDER}" >> "${SYNC_LIST}" - [[ -n ${SHOW_LOGS} ]] && tail -f "${LOGS}" - fi - return 0 -} - -################################################### -# Triggers in case either -j & -k or -l flag ( both -k|-j if with positive integer as argument ) -# Priority: -j > -i > -l > -k -# Globals: 5 variables, 6 functions -# Variables - JOB, SHOW_JOBS_VERBOSE, INFO_PID, LOG_PID, KILL_PID ( all array ) -# Functions - _check_pid, _setup_loop_variables -# _kill_job, _show_jobs, _get_job_info, _remove_job -# Arguments: None -# Result: show either 
job info, individual info or kill job(s) according to set global variables. -# Script exits after -j and -k if kill all is triggered ) -################################################### -_do_job() { - case "${JOB[*]}" in - *SHOW_JOBS*) - _show_jobs "${SHOW_JOBS_VERBOSE:-}" - exit - ;; - *KILL_ALL*) - PIDS="$(_show_jobs | grep -o 'PID:.*[0-9]' | sed "s/PID: //g" || :)" && total=0 - [[ -n ${PIDS} ]] && { - for _pid in ${PIDS}; do - printf "PID: %s - " "${_pid##* }" - _kill_job "${_pid##* }" - ((total += 1)) - done - } - printf "\nTotal Jobs Killed: %s\n" "${total}" - exit - ;; - *PIDS*) - for pid in "${ALL_PIDS[@]}"; do - [[ ${JOB_TYPE} =~ INFO ]] && { - _get_job_info "${pid}" more - [[ ${RETURN_STATUS} -gt 0 ]] && { - [[ ${RETURN_STATUS} = 1 ]] && _remove_job "${pid}" - printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2 - } - } - [[ ${JOB_TYPE} =~ SHOW_LOGS ]] && { - input="$(grep "${pid}" "${SYNC_LIST}" || :)" - if [[ -n ${input} ]]; then - _check_pid "${pid}" && { - : "${input#*"|:_//_:|"}" && local_folder="${_/"|:_//_:|"*/}" - _setup_loop_variables "${local_folder}" "${input/*"|:_//_:|"/}" - tail -f "${LOGS}" - } - else - printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2 - fi - } - [[ ${JOB_TYPE} =~ KILL ]] && { - _get_job_info "${pid}" - if [[ ${RETURN_STATUS} = 0 ]]; then - _kill_job "${pid}" - else - [[ ${RETURN_STATUS} = 1 ]] && _remove_job "${pid}" - printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2 - fi - } - done - [[ ${JOB_TYPE} =~ (INFO|SHOW_LOGS|KILL) ]] && exit 0 - ;; - esac - return 0 -} - -################################################### -# Process all arguments given to the script -# Globals: 1 variable, 3 functions -# Variable - HOME -# Functions - _kill_jobs, _show_jobs, _get_job_info -# Arguments: Many -# ${@} = Flags with arguments -# Result: On -# Success - Set all the variables -# Error - Print error message and exit -################################################### -_setup_arguments() { - [[ $# = 
0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - unset SYNC_TIME_TO_SLEEP ARGS COMMAND_NAME DEBUG GDRIVE_FOLDER KILL SHOW_LOGS - COMMAND_NAME="gupload" - - _check_longoptions() { - [[ -z ${2} ]] && - printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' \ - "${0##*/}" "${1}" "${0##*/}" && exit 1 - return 0 - } - - while [[ $# -gt 0 ]]; do - case "${1}" in - -h | --help) _usage ;; - -D | --debug) DEBUG="true" && export DEBUG && _check_debug ;; - -d | --directory) - _check_longoptions "${1}" "${2}" - GDRIVE_FOLDER="${2}" && shift - ARGS+=" -C \"${GDRIVE_FOLDER}\" " - ;; - -j | --jobs) - [[ ${2} = v* ]] && SHOW_JOBS_VERBOSE="true" && shift - JOB=(SHOW_JOBS) - ;; - -p | --pid) - _check_longoptions "${1}" "${2}" - if [[ ${2} -gt 0 ]]; then - ALL_PIDS+=("${2}") && shift - JOB+=(PIDS) - else - printf "-p/--pid only takes postive integer as arguments.\n" - exit 1 - fi - ;; - -i | --info) JOB_TYPE+="INFO" && INFO="true" ;; - -k | --kill) - JOB_TYPE+="KILL" && KILL="true" - [[ ${2} = all ]] && JOB=(KILL_ALL) && shift - ;; - -l | --logs) JOB_TYPE+="SHOW_LOGS" && SHOW_LOGS="true" ;; - -t | --time) - _check_longoptions "${1}" "${2}" - if [[ ${2} -gt 0 ]]; then - [[ ${2} = default* ]] && UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config" - TO_SLEEP="${2/default=/}" && shift - else - printf "-t/--time only takes positive integers as arguments, min = 1, max = infinity.\n" - exit 1 - fi - ;; - -a | --arguments) - _check_longoptions "${1}" "${2}" - [[ ${2} = default* ]] && UPDATE_DEFAULT_ARGS="_update_config" - ARGS+="${2/default=/} " && shift - ;; - -fg | --foreground) FOREGROUND="true" && SHOW_LOGS="true" ;; - -in | --include) - _check_longoptions "${1}" "${2}" - INCLUDE_FILES="${INCLUDE_FILES} -e '${2}' " && shift - ;; - -ex | --exclude) - _check_longoptions "${1}" "${2}" - EXCLUDE_FILES="${EXCLUDE_FILES} -e '${2}' " && shift - ;; - -c | --command) - _check_longoptions "${1}" "${2}" - CUSTOM_COMMAND_NAME="${2}" && shift - ;; 
- --sync-detail-dir) - _check_longoptions "${1}" "${2}" - SYNC_DETAIL_DIR="${2}" && shift - ;; - -s | --service) - _check_longoptions "${1}" "${2}" - SERVICE_NAME="${2}" && shift - CREATE_SERVICE="true" - ;; - *) - # Check if user meant it to be a flag - if [[ ${1} = -* ]]; then - printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 - else - # If no "-" is detected in 1st arg, it adds to input - FINAL_INPUT_ARRAY+=("${1}") - fi - ;; - esac - shift - done - - INFO_PATH="${HOME}/.google-drive-upload" - CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath" - [[ -f ${CONFIG_INFO} ]] && . "${CONFIG_INFO}" - CONFIG="${CONFIG:-${HOME}/.googledrive.conf}" - SYNC_DETAIL_DIR="${SYNC_DETAIL_DIR:-${INFO_PATH}/sync}" - SYNC_LIST="${SYNC_DETAIL_DIR}/sync_list" - mkdir -p "${SYNC_DETAIL_DIR}" && printf "" >> "${SYNC_LIST}" - - _do_job - - [[ -z ${FINAL_INPUT_ARRAY[*]} ]] && _short_help - - return 0 -} - -################################################### -# Grab config variables and modify defaults if necessary -# Globals: 5 variables, 2 functions -# Variables - INFO_PATH, UPDATE_DEFAULT_CONFIG, DEFAULT_ARGS -# UPDATE_DEFAULT_ARGS, UPDATE_DEFAULT_TIME_TO_SLEEP, TIME_TO_SLEEP -# Functions - _print_center, _update_config -# Arguments: None -# Result: grab COMMAND_NAME, INSTALL_PATH, and CONFIG -# source CONFIG, update default values if required -################################################### -_config_variables() { - COMMAND_NAME="${CUSTOM_COMMAND_NAME:-${COMMAND_NAME}}" - VALUES_LIST="REPO COMMAND_NAME SYNC_COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE" - VALUES_REGEX="" && for i in ${VALUES_LIST}; do - VALUES_REGEX="${VALUES_REGEX:+${VALUES_REGEX}|}^${i}=\".*\".* # added values" - done - - # Check if command exist, not necessary but just in case. 
- { - COMMAND_PATH="$(command -v "${COMMAND_NAME}")" 1> /dev/null && - SCRIPT_VALUES="$(grep -E "${VALUES_REGEX}|^SELF_SOURCE=\".*\"" "${COMMAND_PATH}" || :)" && eval "${SCRIPT_VALUES}" && - [[ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]] && : - } || { printf "Error: %s is not installed, use -c/--command to specify.\n" "${COMMAND_NAME}" 1>&2 && exit 1; } - - ARGS+=" -q " - SYNC_TIME_TO_SLEEP="3" - # Config file is created automatically after first run - # shellcheck source=/dev/null - [[ -r ${CONFIG} ]] && . "${CONFIG}" - - SYNC_TIME_TO_SLEEP="${TO_SLEEP:-${SYNC_TIME_TO_SLEEP}}" - ARGS+=" ${SYNC_DEFAULT_ARGS:-} " - "${UPDATE_DEFAULT_ARGS:-:}" SYNC_DEFAULT_ARGS " ${ARGS} " "${CONFIG}" - "${UPDATE_DEFAULT_TIME_TO_SLEEP:-:}" SYNC_TIME_TO_SLEEP "${SYNC_TIME_TO_SLEEP}" "${CONFIG}" - return 0 -} - -################################################### -# Print systemd service file contents -# Globals: 5 variables -# Variables - LOGNAME, INSTALL_PATH, COMMAND_NAME, SYNC_COMMAND_NAME, ALL_ARGUMNETS -# Arguments: None -################################################### -_systemd_service_contents() { - declare username="${LOGNAME:?Give username}" install_path="${INSTALL_PATH:?Missing install path}" \ - cmd="${COMMAND_NAME:?Missing command name}" sync_cmd="${SYNC_COMMAND_NAME:?Missing gsync cmd name}" \ - all_argumnets="${ALL_ARGUMNETS:-}" - - printf "%s\n" '# Systemd service file - start -[Unit] -Description=google-drive-upload synchronisation service -After=network.target - -[Service] -Type=simple -User='"${username}"' -Restart=on-abort -RestartSec=3 -ExecStart="'"${install_path}/${sync_cmd}"'" --foreground --command "'"${install_path}/${cmd}"'" --sync-detail-dir "/tmp/sync" '"${all_argumnets}"' - -# Security -PrivateTmp=true -ProtectSystem=full -NoNewPrivileges=true -ProtectControlGroups=true -ProtectKernelModules=true -ProtectKernelTunables=true -PrivateDevices=true -RestrictAddressFamilies=AF_INET AF_INET6 AF_NETLINK -RestrictNamespaces=true 
-RestrictRealtime=true -SystemCallArchitectures=native - -[Install] -WantedBy=multi-user.target -# Systemd service file - end' -} - -################################################### -# Create systemd service wrapper script for managing the service -# Globals: None -# Arguments: 3 -# ${1} = Service name -# ${1} = Service file contents -# ${1} = Script name -# Result: print the script contents to script file -################################################### -_systemd_service_script() { - declare name="${1:?Missing service name}" script_name script \ - service_file_contents="${2:?Missing service file contents}" - script_name="${3:?Missing script name}" - - # shellcheck disable=SC2016 - script='#!/usr/bin/env bash -set -e - -_usage() { - printf "%b" "# Service name: '"'${name}'"' - -# Print the systemd service file contents -bash \"${0##*/}\" print\n -# Add service to systemd files ( this must be run before doing any of the below ) -bash \"${0##*/}\" add\n -# Start or Stop the service -bash \"${0##*/}\" start / stop\n -# Enable or Disable as a boot service: -bash \"${0##*/}\" enable / disable\n -# See logs -bash \"${0##*/}\" logs\n -# Remove the service from system -bash \"${0##*/}\" remove\n\n" - - _status - exit 0 -} - -_status() { - declare status current_status - status="$(systemctl status '"'${name}'"' 2>&1 || :)" - current_status="$(printf "%s\n" "${status}" | env grep -E "●.*|(Loaded|Active|Main PID|Tasks|Memory|CPU): .*" || :)" - - printf "%s\n" "Current status of service: ${current_status:-${status}}" - return 0 -} - -unset TMPFILE - -[[ $# = 0 ]] && _usage - -CONTENTS='"'${service_file_contents}'"' - -_add_service() { - declare service_file_path="/etc/systemd/system/'"${name}"'.service" - printf "%s\n" "Service file path: ${service_file_path}" - if [[ -f ${service_file_path} ]]; then - printf "%s\n" "Service file already exists. 
Overwriting" - sudo mv "${service_file_path}" "${service_file_path}.bak" || exit 1 - printf "%s\n" "Existing service file was backed up." - printf "%s\n" "Old service file: ${service_file_path}.bak" - else - [[ -z ${TMPFILE} ]] && { - { { command -v mktemp 1>|/dev/null && TMPFILE="$(mktemp -u)"; } || - TMPFILE="${PWD}/.$(_t="$(printf "%(%s)T\\n" "-1")" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 - } - export TMPFILE - trap "exit" INT TERM - _rm_tmpfile() { rm -f "${TMPFILE:?}" ; } - trap "_rm_tmpfile" EXIT - trap "" TSTP # ignore ctrl + z - - { printf "%s\n" "${CONTENTS}" >|"${TMPFILE}" && sudo cp "${TMPFILE}" /etc/systemd/system/'"${name}"'.service; } || - { printf "%s\n" "Error: Failed to add service file to system." && exit 1 ;} - fi - sudo systemctl daemon-reload || printf "%s\n" "Could not reload the systemd daemon." - printf "%s\n" "Service file was successfully added." - return 0 -} - -_service() { - declare service_name='"'${name}'"' action="${1:?}" service_file_path - service_file_path="/etc/systemd/system/${service_name}.service" - printf "%s\n" "Service file path: ${service_file_path}" - [[ -f ${service_file_path} ]] || { printf "%s\n" "Service file does not exist." && exit 1; } - sudo systemctl daemon-reload || exit 1 - case "${action}" in - log*) sudo journalctl -u "${service_name}" -f ;; - rm | remove) - sudo systemctl stop "${service_name}" || : - if sudo rm -f /etc/systemd/system/"${service_name}".service; then - sudo systemctl daemon-reload || : - printf "%s\n" "Service removed." && return 0 - else - printf "%s\n" "Error: Cannot remove." && exit 1 - fi - ;; - *) - declare success="${2:?}" error="${3:-}" - if sudo systemctl "${action}" "${service_name}"; then - printf "%s\n" "Success: ${service_name} ${success}." && return 0 - else - printf "%s\n" "Error: Cannot ${action} ${service_name} ${error}." 
&& exit 1 - fi - ;; - esac - return 0 -} - -while [[ "${#}" -gt 0 ]]; do - case "${1}" in - print) printf "%s\n" "${CONTENTS}" ;; - add) _add_service ;; - start) _service start started ;; - stop) _service stop stopped ;; - enable) _service enable "boot service enabled" "boot service" ;; - disable) _service disable "boot service disabled" "boot service" ;; - logs) _service logs ;; - remove) _service rm ;; - *) printf "%s\n" "Error: No valid options provided." && _usage ;; - esac - shift -done' - printf "%s\n" "${script}" >| "${script_name}" - return 0 -} - -################################################### -# Process all the values in "${FINAL_INPUT_ARRAY[@]}" -# Globals: 20 variables, 15 functions -# Variables - FINAL_INPUT_ARRAY ( array ), DEFAULT_ACCOUNT, ROOT_FOLDER_NAME, GDRIVE_FOLDER -# PID_FILE, SHOW_LOGS, LOGS, KILL, INFO, CREATE_SERVICE, ARGS, SERVICE_NAME -# Functions - _set_value, _systemd_service_script, _systemd_service_contents, _print_center, _check_existing_loop, _start_new_loop -# Arguments: None -# Result: Start the sync jobs for given folders, if running already, don't start new. -# If a pid is detected but not running, remove that job. -# If service script is going to be created then don,t touch the jobs -################################################### -_process_arguments() { - declare current_folder && declare -A Aseen - for INPUT in "${FINAL_INPUT_ARRAY[@]}"; do - { [[ ${Aseen[${INPUT}]} ]] && continue; } || Aseen[${INPUT}]=x - ! 
[[ -d ${INPUT} ]] && printf "\nError: Invalid Input ( %s ), no such directory.\n" "${INPUT}" && continue - current_folder="$(pwd)" - FOLDER="$(cd "${INPUT}" && pwd)" || exit 1 - [[ -n ${DEFAULT_ACCOUNT} ]] && _set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${DEFAULT_ACCOUNT}_ROOT_FOLDER_NAME" - GDRIVE_FOLDER="${GDRIVE_FOLDER:-${ROOT_FOLDER_NAME:-Unknown}}" - - [[ -n ${CREATE_SERVICE} ]] && { - ALL_ARGUMNETS="\"${FOLDER}\" ${TO_SLEEP:+-t \"${TO_SLEEP}\"} -a \"${ARGS// / }\"" - num="${num+$((num += 1))}" - service_name="gsync-${SERVICE_NAME}${num:+_${num}}" - script_name="${service_name}.service.sh" - _systemd_service_script "${service_name}" "$(_systemd_service_contents)" "${script_name}" - - _print_center "normal" "=" "=" - bash "${script_name}" - _print_center "normal" "=" "=" - continue - } - - cd "${FOLDER}" || exit 1 - _check_existing_loop - case "${RETURN_STATUS}" in - 0 | 2) _start_new_loop ;; - 1) - printf "%b\n" "Job is already running.." - if [[ -n ${INFO} ]]; then - _get_job_info "${PID}" more "PID: ${PID}|:_//_:|${FOLDER}|:_//_:|${GDRIVE_FOLDER}" - else - printf "%b\n" "Local Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}" - printf "%s\n" "PID: ${PID}" - fi - - [[ -n ${KILL} ]] && _kill_job "${PID}" && exit - [[ -n ${SHOW_LOGS} ]] && tail -f "${LOGS}" - ;; - esac - cd "${current_folder}" || exit 1 - done - return 0 -} - -main() { - [[ $# = 0 ]] && _short_help - - set -o errexit -o noclobber -o pipefail - - [[ -z ${SELF_SOURCE} ]] && { - UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" - { . 
"${UTILS_FOLDER}"/common-utils.bash; } || { printf "Error: Unable to source util files.\n" && exit 1; } - } - - trap '' TSTP # ignore ctrl + z - - _setup_arguments "${@}" - _check_debug - _config_variables - _process_arguments -} - -main "${@}" diff --git a/bash/release/gupload b/bash/release/gupload deleted file mode 100755 index 41c3d23..0000000 --- a/bash/release/gupload +++ /dev/null @@ -1,2282 +0,0 @@ -#!/usr/bin/env bash -SELF_SOURCE="true" -# Functions that will used in core script - -################################################### -# Convert bytes to human readable form -# Globals: None -# Required Arguments: 1 -# ${1} = Positive integer ( bytes ) -# Result: Print human readable form. -# Reference: -# https://unix.stackexchange.com/a/259254 -################################################### -_bytes_to_human() { - declare b=${1:-0} d='' s=0 S=(Bytes {K,M,G,T,P,E,Y,Z}B) - while ((b > 1024)); do - d="$(printf ".%02d" $((b % 1024 * 100 / 1024)))" - b=$((b / 1024)) && ((s++)) - done - printf "%s\n" "${b}${d} ${S[${s}]}" -} - -################################################### -# Check for bash version >= 4.x -# Globals: 1 Variable -# BASH_VERSINFO -# Required Arguments: None -# Result: If -# SUCEESS: Status 0 -# ERROR: print message and exit 1 -################################################### -_check_bash_version() { - { ! [[ ${BASH_VERSINFO:-0} -ge 4 ]] && printf "Bash version lower than 4.x not supported.\n" && exit 1; } || : -} - -################################################### -# Check if debug is enabled and enable command trace -# Globals: 2 variables, 1 function -# Varibles - DEBUG, QUIET -# Function - _is_terminal -# Arguments: None -# Result: If DEBUG -# Present - Enable command trace and change print functions to avoid spamming. -# Absent - Disable command trace -# Check QUIET, then check terminal size and enable print functions accordingly. 
-################################################### -_check_debug() { - if [[ -n ${DEBUG} ]]; then - set -x && PS4='-> ' - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "${2}"; } || { printf "%s%s\n" "${2}" "${3}"; }; } - _clear_line() { :; } && _newline() { :; } - else - if [[ -z ${QUIET} ]]; then - if _support_ansi_escapes; then - # This refreshes the interactive shell so we can use the ${COLUMNS} variable in the _print_center function. - shopt -s checkwinsize && (: && :) - if [[ ${COLUMNS} -lt 45 ]]; then - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - else - trap 'shopt -s checkwinsize; (:;:)' SIGWINCH - fi - export CURL_PROGRESS="-#" EXTRA_LOG="_print_center" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" - else - _print_center() { { [[ $# = 3 ]] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - _clear_line() { :; } - fi - _newline() { printf "%b" "${1}"; } - else - _print_center() { :; } && _clear_line() { :; } && _newline() { :; } - fi - set +x - fi - export -f _print_center _clear_line _newline -} - -################################################### -# Check internet connection. -# Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs. -# Globals: 3 functions -# _print_center, _clear_line, _timeout -# Arguments: None -# Result: On -# Success - Nothing -# Error - print message and exit 1 -################################################### -_check_internet() { - "${EXTRA_LOG}" "justify" "Checking Internet Connection.." "-" - if ! _timeout 10 curl -Is google.com; then - _clear_line 1 - "${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" - return 1 - fi - _clear_line 1 -} - -################################################### -# Move cursor to nth no. of line and clear it to the begining. 
-# Globals: None -# Arguments: 1 -# ${1} = Positive integer ( line number ) -# Result: Read description -################################################### -_clear_line() { - printf "\033[%sA\033[2K" "${1}" -} - -################################################### -# Alternative to wc -l command -# Globals: None -# Arguments: 1 or pipe -# ${1} = file, _count < file -# variable, _count <<< variable -# pipe = echo something | _count -# Result: Read description -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#get-the-number-of-lines-in-a-file -################################################### -_count() { - mapfile -tn 0 lines - printf '%s\n' "${#lines[@]}" -} - -################################################### -# Alternative to dirname command -# Globals: None -# Arguments: 1 -# ${1} = path of file or folder -# Result: read description -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#get-the-directory-name-of-a-file-path -################################################### -_dirname() { - declare tmp=${1:-.} - - [[ ${tmp} != *[!/]* ]] && { printf '/\n' && return; } - tmp="${tmp%%"${tmp##*[!/]}"}" - - [[ ${tmp} != */* ]] && { printf '.\n' && return; } - tmp=${tmp%/*} && tmp="${tmp%%"${tmp##*[!/]}"}" - - printf '%s\n' "${tmp:-/}" -} - -################################################### -# Convert given time in seconds to readable form -# 110 to 1 minute(s) and 50 seconds -# Globals: None -# Arguments: 1 -# ${1} = Positive Integer ( time in seconds ) -# Result: read description -# Reference: -# https://stackoverflow.com/a/32164707 -################################################### -_display_time() { - declare T="${1}" - declare DAY="$((T / 60 / 60 / 24))" HR="$((T / 60 / 60 % 24))" MIN="$((T / 60 % 60))" SEC="$((T % 60))" - [[ ${DAY} -gt 0 ]] && printf '%d days ' "${DAY}" - [[ ${HR} -gt 0 ]] && printf '%d hrs ' "${HR}" - [[ ${MIN} -gt 0 ]] && printf '%d minute(s) ' "${MIN}" - [[ ${DAY} -gt 0 || ${HR} -gt 0 || ${MIN} -gt 0 ]] && printf 
'and ' - printf '%d seconds\n' "${SEC}" -} - -################################################### -# Fetch latest commit sha of release or branch -# Do not use github rest api because rate limit error occurs -# Globals: None -# Arguments: 3 -# ${1} = "branch" or "release" -# ${2} = branch name or release name -# ${3} = repo name e.g labbots/google-drive-upload -# Result: print fetched sha -################################################### -_get_latest_sha() { - declare LATEST_SHA - case "${1:-${TYPE}}" in - branch) - LATEST_SHA="$( - : "$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000)" - : "$(printf "%s\n" "${_}" | grep -o "Commit\\/.*<" -m1 || :)" && : "${_##*\/}" && printf "%s\n" "${_%%<*}" - )" - ;; - release) - LATEST_SHA="$( - : "$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}")" - : "$(printf "%s\n" "${_}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && : "${_##*commit\/}" && printf "%s\n" "${_%%\"*}" - )" - ;; - esac - printf "%b" "${LATEST_SHA:+${LATEST_SHA}\n}" -} - -################################################### -# Encode the given string to parse properly as json -# Globals: None -# Arguments: 2 -# ${1} = json or something else -# ${2} = input -# Result: if ${1} is j, then escape all chars, else only special chars -# Reference: -# https://tools.ietf.org/html/rfc7159#section-7 -################################################### -_json_escape() { - declare mode="${1:?}" input="${2:?Provide Input}" - [[ ${mode} = "j" ]] && { - # \ and / - : "${input//\\/\\\\}" - : "${_//\//\\\/}" - # : "${_//\'/\\\'}" # ' (not strictly needed ?) 
- input="${_//\"/\\\"}" # " - } - : "${input//$'\t'/\\\t}" # \t (tab) - : "${_//$'\n'/\\\n}" # \n (newline) - : "${_//$'\r'/\\\r}" # \r (carriage return) - : "${_//$'\f'/\\\f}" # \f (form feed) - : "${_//$'\b'/\\\b}" # \b (backspace) - printf "%s" "${_}" -} - -################################################### -# Method to extract specified field data from json -# Globals: None -# Arguments: 2 -# ${1} - value of field to fetch from json -# ${2} - Optional, no of lines to parse for the given field in 1st arg -# ${3} - Optional, nth number of value from extracted values, default it 1. -# Input: file | here string | pipe -# _json_value "Arguments" < file -# _json_value "Arguments" <<< "${varibale}" -# echo something | _json_value "Arguments" -# Result: print extracted value -################################################### -_json_value() { - declare num _tmp no_of_lines - { [[ ${2} -gt 0 ]] && no_of_lines="${2}"; } || : - { [[ ${3} -gt 0 ]] && num="${3}"; } || { [[ ${3} != all ]] && num=1; } - # shellcheck disable=SC2086 - _tmp="$(grep -o "\"${1}\"\:.*" ${no_of_lines:+-m} ${no_of_lines})" || return 1 - printf "%s\n" "${_tmp}" | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num}"p || : -} - -################################################### -# Print a text to center interactively and fill the rest of the line with text specified. -# This function is fine-tuned to this script functionality, so may appear unusual. 
-# Globals: 1 variable -# COLUMNS -# Arguments: 4 -# If ${1} = normal -# ${2} = text to print -# ${3} = symbol -# If ${1} = justify -# If remaining arguments = 2 -# ${2} = text to print -# ${3} = symbol -# If remaining arguments = 3 -# ${2}, ${3} = text to print -# ${4} = symbol -# Result: read description -# Reference: -# https://gist.github.com/TrinityCoder/911059c83e5f7a351b785921cf7ecda -################################################### -_print_center() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare -i TERM_COLS="${COLUMNS}" - declare type="${1}" filler - case "${type}" in - normal) declare out="${2}" && symbol="${3}" ;; - justify) - if [[ $# = 3 ]]; then - declare input1="${2}" symbol="${3}" TO_PRINT out - TO_PRINT="$((TERM_COLS - 5))" - { [[ ${#input1} -gt ${TO_PRINT} ]] && out="[ ${input1:0:TO_PRINT}..]"; } || { out="[ ${input1} ]"; } - else - declare input1="${2}" input2="${3}" symbol="${4}" TO_PRINT temp out - TO_PRINT="$((TERM_COLS * 47 / 100))" - { [[ ${#input1} -gt ${TO_PRINT} ]] && temp+=" ${input1:0:TO_PRINT}.."; } || { temp+=" ${input1}"; } - TO_PRINT="$((TERM_COLS * 46 / 100))" - { [[ ${#input2} -gt ${TO_PRINT} ]] && temp+="${input2:0:TO_PRINT}.. 
"; } || { temp+="${input2} "; } - out="[${temp}]" - fi - ;; - *) return 1 ;; - esac - - declare -i str_len=${#out} - [[ $str_len -ge $((TERM_COLS - 1)) ]] && { - printf "%s\n" "${out}" && return 0 - } - - declare -i filler_len="$(((TERM_COLS - str_len) / 2))" - [[ $# -ge 2 ]] && ch="${symbol:0:1}" || ch=" " - for ((i = 0; i < filler_len; i++)); do - filler="${filler}${ch}" - done - - printf "%s%s%s" "${filler}" "${out}" "${filler}" - [[ $(((TERM_COLS - str_len) % 2)) -ne 0 ]] && printf "%s" "${ch}" - printf "\n" - - return 0 -} - -################################################### -# Quiet version of _print_center -################################################### -_print_center_quiet() { - { [[ $# = 3 ]] && printf "%s\n" "${2}"; } || printf "%s%s\n" "${2}" "${3}" -} - -################################################### -# Evaluates value1=value2 -# Globals: None -# Arguments: 3 -# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) -# ${2} = var name -# ${3} = var value -# Result: export value1=value2 -################################################### -_set_value() { - case "${1:?}" in - d | direct) export "${2:?}=${3}" ;; - i | indirect) export "${2:?}=${!3}" ;; - esac -} - -################################################### -# Check if script terminal supports ansi escapes -# Globals: 1 variable -# TERM -# Arguments: None -# Result: return 1 or 0 -################################################### -_support_ansi_escapes() { - { [[ -t 2 && -n ${TERM} && ${TERM} =~ (xterm|rxvt|urxvt|linux|vt|screen) ]] && return 0; } || return 1 -} - -################################################### -# Alternative to timeout command -# Globals: None -# Arguments: 1 and rest -# ${1} = amount of time to sleep -# rest = command to execute -# Result: Read description -# Reference: -# https://stackoverflow.com/a/24416732 -################################################### -_timeout() { - declare timeout="${1:?Error: Specify Timeout}" && shift - { - "${@}" & - 
child="${!}" - trap -- "" TERM - { - sleep "${timeout}" - kill -9 "${child}" - } & - wait "${child}" - } 2>| /dev/null 1>&2 -} - -################################################### -# Config updater -# Incase of old value, update, for new value add. -# Globals: None -# Arguments: 3 -# ${1} = value name -# ${2} = value -# ${3} = config path -# Result: read description -################################################### -_update_config() { - [[ $# -lt 3 ]] && printf "Missing arguments\n" && return 1 - declare value_name="${1}" value="${2}" config_path="${3}" - ! [ -f "${config_path}" ] && : >| "${config_path}" # If config file doesn't exist. - chmod u+w "${config_path}" || return 1 - printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name}=" "${config_path}" || :)" \ - "${value_name}=\"${value}\"" >| "${config_path}" || return 1 - chmod a-w-r-x,u+r "${config_path}" || return 1 - return 0 -} - -################################################### -# Encode the given string to parse properly in network requests -# Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#percent-encode-a-string -################################################### -_url_encode() { - declare LC_ALL=C - for ((i = 0; i < ${#1}; i++)); do - : "${1:i:1}" - case "${_}" in - [a-zA-Z0-9.~_-]) - printf '%s' "${_}" - ;; - *) - printf '%%%02X' "'${_}" - ;; - esac - done 2>| /dev/null - printf '\n' -} - -export -f _bytes_to_human \ - _check_bash_version \ - _check_debug \ - _check_internet \ - _clear_line \ - _count \ - _dirname \ - _display_time \ - _get_latest_sha \ - _json_escape \ - _json_value \ - _print_center \ - _print_center_quiet \ - _set_value \ - _support_ansi_escapes \ - _timeout \ - _update_config \ - _url_encode -# auth utils for Google Drive -# shellcheck source=/dev/null - -################################################### -# Check if account name is valid by a regex expression -# Globals: None -# 
Arguments: 1 -# ${1} = Account name -# Result: read description and return 1 or 0 -################################################### -_account_name_valid() { - declare name="${1:-}" account_name_regex='^([A-Za-z0-9_])+$' - [[ ${name} =~ ${account_name_regex} ]] || return 1 - return 0 -} - -################################################### -# Check if account exists -# First check if the given account is in correct format -# then check if client [id|token] and refresh token is present -# Globals: 2 functions -# _set_value, _account_name_valid -# Arguments: 1 -# ${1} = Account name -# Result: read description and return 1 or 0 -################################################### -_account_exists() { - declare name="${1:-}" client_id client_secret refresh_token - _account_name_valid "${name}" || return 1 - _set_value indirect client_id "ACCOUNT_${name}_CLIENT_ID" - _set_value indirect client_secret "ACCOUNT_${name}_CLIENT_SECRET" - _set_value indirect refresh_token "ACCOUNT_${name}_REFRESH_TOKEN" - [[ -z ${client_id:+${client_secret:+${refresh_token}}} ]] && return 1 - return 0 -} - -################################################### -# Show all accounts configured in config file -# Globals: 2 variables, 4 functions -# Variable - CONFIG, QUIET -# Functions - _account_exists, _set_value, _print_center, _reload_config -# Arguments: None -# Result: SHOW all accounts, export COUNT and ACC_${count}_ACC dynamic variables -# or print "No accounts configured yet." -################################################### -_all_accounts() { - { _reload_config && _handle_old_config; } || return 1 - declare all_accounts && COUNT=0 - mapfile -t all_accounts <<< "$(grep -oE '^ACCOUNT_.*_CLIENT_ID' "${CONFIG}" | sed -e "s/ACCOUNT_//g" -e "s/_CLIENT_ID//g")" - for account in "${all_accounts[@]}"; do - [[ -n ${account} ]] && _account_exists "${account}" && - { [[ ${COUNT} = 0 ]] && "${QUIET:-_print_center}" "normal" " All available accounts. 
" "=" || :; } && - printf "%b" "$((COUNT += 1)). ${account} \n" && _set_value direct "ACC_${COUNT}_ACC" "${account}" - done - { [[ ${COUNT} -le 0 ]] && "${QUIET:-_print_center}" "normal" " No accounts configured yet. " "=" 1>&2; } || printf '\n' - return 0 -} - -################################################### -# Setup a new account name -# If given account name is configured already, then ask for name -# after name has been properly setup, export ACCOUNT_NAME var -# Globals: 1 variable, 5 functions -# Variable - QUIET -# Functions - _print_center, _account_exists, _clear_line, _account_name_valid, _reload_config -# Arguments: 1 -# ${1} = Account name ( optional ) -# Result: read description and export ACCOUNT_NAME NEW_ACCOUNT_NAME -################################################### -_set_new_account_name() { - _reload_config || return 1 - declare new_account_name="${1:-}" name_valid - [[ -z ${new_account_name} ]] && { - _all_accounts 2>| /dev/null - "${QUIET:-_print_center}" "normal" " New account name: " "=" - "${QUIET:-_print_center}" "normal" "Info: Account names can only contain alphabets / numbers / dashes." " " && printf '\n' - } - until [[ -n ${name_valid} ]]; do - if [[ -n ${new_account_name} ]]; then - if _account_name_valid "${new_account_name}"; then - if _account_exists "${new_account_name}"; then - "${QUIET:-_print_center}" "normal" " Warning: Given account ( ${new_account_name} ) already exists, input different name. " "-" 1>&2 - unset new_account_name && continue - else - export NEW_ACCOUNT_NAME="${new_account_name}" ACCOUNT_NAME="${new_account_name}" && name_valid="true" && continue - fi - else - "${QUIET:-_print_center}" "normal" " Warning: Given account name ( ${new_account_name} ) invalid, input different name. " "-" 1>&2 - unset new_account_name && continue - fi - else - [[ -t 1 ]] || { "${QUIET:-_print_center}" "normal" " Error: Not running in an interactive terminal, cannot ask for new account name. 
" 1>&2 && return 1; } - printf -- "-> \033[?7l" - read -r new_account_name - printf '\033[?7h' - fi - _clear_line 1 - done - "${QUIET:-_print_center}" "normal" " Given account name: ${NEW_ACCOUNT_NAME} " "=" - export ACCOUNT_NAME="${NEW_ACCOUNT_NAME}" - return 0 -} - -################################################### -# Delete a account from config file -# Globals: 2 variables, 3 functions -# Variables - CONFIG, QUIET -# Functions - _account_exists, _print_center, _reload_config -# Arguments: None -# Result: check if account exists and delete from config, else print error message -################################################### -_delete_account() { - { _reload_config && _handle_old_config; } || return 1 - declare account="${1:?Error: give account name}" regex config_without_values - if _account_exists "${account}"; then - regex="^ACCOUNT_${account}_(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)|DEFAULT_ACCOUNT=\"${account}\"" - config_without_values="$(grep -vE "${regex}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit - printf "%s\n" "${config_without_values}" >| "${CONFIG}" || return 1 - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms - "${QUIET:-_print_center}" "normal" " Successfully deleted account ( ${account} ) from config. " "-" - _reload_config # reload config if successfully deleted - else - "${QUIET:-_print_center}" "normal" " Error: Cannot delete account ( ${account} ) from config. No such account exists. 
" "-" 1>&2 - fi - return 0 -} - -################################################### -# handle legacy config -# this will be triggered only if old config values are present, convert to new format -# new account will be created with "default" name, if default already taken, then add a number as suffix -# Globals: 7 variables, 2 functions -# Variables - CLIENT_ID CLIENT_SECRET, REFRESH_TOKEN, ROOT_FOLDER, ROOT_FOLDER_NAME CONFIG, ACCOUNT_NAME -# Functions - _account_exists, _reload_config -# Arguments: None -################################################### -_handle_old_config() { - export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN # to handle a shellcheck warning - # only try to convert the if all three values are present - [[ -n ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && { - declare account_name="default" regex config_without_values count=0 - # first try to name the new account as default, otherwise try to add numbers as suffix - until ! _account_exists "${account_name}"; do - account_name="${account_name}$((count += 1))" - done - # form a regex expression to remove values from config, _update_config isn't used here to prevent a loop and multiple grep calls - regex="^(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)" - config_without_values="$(grep -vE "${regex}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit - printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ - "ACCOUNT_${account_name}_CLIENT_ID=\"${CLIENT_ID}\"" \ - "ACCOUNT_${account_name}_CLIENT_SECRET=\"${CLIENT_SECRET}\"" \ - "ACCOUNT_${account_name}_REFRESH_TOKEN=\"${REFRESH_TOKEN}\"" \ - "ACCOUNT_${account_name}_ROOT_FOLDER=\"${ROOT_FOLDER}\"" \ - "ACCOUNT_${account_name}_ROOT_FOLDER_NAME=\"${ROOT_FOLDER_NAME}\"" \ - "${config_without_values}" >| "${CONFIG}" || return 1 - - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms - - _reload_config || return 1 # reload config file - } - return 0 -} - 
-################################################### -# handle old config values, new account creation, custom account name, updating default config and account -# start token service if applicable -# Globals: 12 variables, 7 functions -# Variables - DEFAULT_CONFIG, NEW_ACCOUNT_NAME, CUSTOM_ACCOUNT_NAME, DELETE_ACCOUNT_NAME, LIST_ACCOUNTS, QUIET -# UPDATE_DEFAULT_ACCOUNT, UPDATE_DEFAULT_CONFIG, CONFIG_INFO, CONTINUE_WITH_NO_INPUT -# Functions - _reload_config, _handle_old_config, _set_new_account_name, _account_exists, _all_accounts -# _check_account_credentials, _token_bg_service, _print_center, _update_config, _set_value -# Arguments: None -# Result: read description and start access token check in bg if required -################################################### -_check_credentials() { - { _reload_config && _handle_old_config; } || return 1 - # set account name to default account name - ACCOUNT_NAME="${DEFAULT_ACCOUNT}" - - if [[ -n ${NEW_ACCOUNT_NAME} ]]; then - # create new account, --create-account flag - _set_new_account_name "${NEW_ACCOUNT_NAME}" || return 1 - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - else - # use custom account, --account flag - if [[ -n ${CUSTOM_ACCOUNT_NAME} ]]; then - if _account_exists "${CUSTOM_ACCOUNT_NAME}"; then - ACCOUNT_NAME="${CUSTOM_ACCOUNT_NAME}" - else - # error out in case CUSTOM_ACCOUNT_NAME is invalid - "${QUIET:-_print_center}" "normal" " Error: No such account ( ${CUSTOM_ACCOUNT_NAME} ) exists. 
" "-" && return 1 - fi - elif [[ -n ${DEFAULT_ACCOUNT} ]]; then - # check if default account if valid or not, else set account name to nothing and remove default account in config - _account_exists "${DEFAULT_ACCOUNT}" || { - _update_config DEFAULT_ACCOUNT "" "${CONFIG}" && unset DEFAULT_ACCOUNT ACCOUNT_NAME && UPDATE_DEFAULT_ACCOUNT="_update_config" - } - # UPDATE_DEFAULT_ACCOUNT to true so that default config is updated later - else - UPDATE_DEFAULT_ACCOUNT="_update_config" # as default account doesn't exist - fi - - # in case no account name is set at this point of script - if [[ -z ${ACCOUNT_NAME} ]]; then - # if accounts are configured but default account is not set - # COUNT comes from _all_accounts function - if _all_accounts 2>| /dev/null && [[ ${COUNT} -gt 0 ]]; then - # set ACCOUNT_NAME without asking if only one account available - if [[ ${COUNT} -eq 1 ]]; then - _set_value indirect ACCOUNT_NAME "ACC_1_ACC" # ACC_1_ACC comes from _all_accounts function - else - "${QUIET:-_print_center}" "normal" " Above accounts are configured, but default one not set. " "=" - if [[ -t 1 ]]; then - "${QUIET:-_print_center}" "normal" " Choose default account: " "-" - until [[ -n ${ACCOUNT_NAME} ]]; do - printf -- "-> \033[?7l" - read -r account_name - printf '\033[?7h' - if [[ ${account_name} -gt 0 && ${account_name} -le ${COUNT} ]]; then - _set_value indirect ACCOUNT_NAME "ACC_${COUNT}_ACC" - else - _clear_line 1 - fi - done - else - # if not running in a terminal then choose 1st one as default - printf "%s\n" "Warning: Script is not running in a terminal, choosing first account as default." 
- _set_value indirect ACCOUNT_NAME "ACC_1_ACC" # ACC_1_ACC comes from _all_accounts function - fi - fi - else - _set_new_account_name "" || return 1 - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - fi - fi - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - fi - - "${UPDATE_DEFAULT_ACCOUNT:-:}" DEFAULT_ACCOUNT "${ACCOUNT_NAME}" "${CONFIG}" # update default account if required - "${UPDATE_DEFAULT_CONFIG:-:}" CONFIG "${CONFIG}" "${CONFIG_INFO}" # update default config if required - - # only launch the token service if there was some input - [[ -n ${CONTINUE_WITH_NO_INPUT} ]] || _token_bg_service # launch token bg service - return 0 -} - -################################################### -# check credentials for a given account name -# Globals: 3 functions -# Functions - _check_client, _check_refresh_token, _check_access_token -# Arguments: 2 -# ${1} = Account name ( optional ) -# Result: read description, return 1 or 0 -################################################### -_check_account_credentials() { - declare account_name="${1:-}" - { - _check_client ID "${account_name}" && - _check_client SECRET "${account_name}" && - _check_refresh_token "${account_name}" && - _check_access_token "${account_name}" check - } || return 1 - return 0 -} - -################################################### -# Check client id or secret and ask if required -# Globals: 4 variables, 3 functions -# Variables - CONFIG, QUIET, CLIENT_ID_${ACCOUNT_NAME}, CLIENT_SECRET_${ACCOUNT_NAME} -# Functions - _print_center, _update_config, _set_value -# Arguments: 2 -# ${1} = ID or SECRET -# ${2} = Account name ( optional - if not given, then just CLIENT_[ID|SECRET] var is used ) -# Result: read description and export ACCOUNT_name_CLIENT_[ID|SECRET] CLIENT_[ID|SECRET] -################################################### -_check_client() { - declare type="CLIENT_${1:?Error: ID or SECRET}" account_name="${2:-}" \ - type_name type_value type_regex valid client message - export 
client_id_regex='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' client_secret_regex='[0-9A-Za-z_-]+' - type_name="${account_name:+ACCOUNT_${account_name}_}${type}" - - # set the type_value to the actual value of ACCOUNT_${account_name}_[ID|SECRET] - _set_value indirect type_value "${type_name}" - # set the type_regex to the actual value of client_id_regex or client_secret_regex - _set_value indirect type_regex "${type}_regex" - - until [[ -n ${type_value} && -n ${valid} ]]; do - [[ -n ${type_value} ]] && { - if [[ ${type_value} =~ ${type_regex} ]]; then - [[ -n ${client} ]] && { _update_config "${type_name}" "${type_value}" "${CONFIG}" || return 1; } - valid="true" && continue - else - { [[ -n ${client} ]] && message="- Try again"; } || message="in config ( ${CONFIG} )" - "${QUIET:-_print_center}" "normal" " Invalid Client ${1} ${message} " "-" && unset "${type_name}" client - fi - } - [[ -z ${client} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter Client ${1} " "-" - [[ -n ${client} ]] && _clear_line 1 - printf -- "-> " - read -r "${type_name?}" && client=1 - _set_value indirect type_value "${type_name}" - done - - # export ACCOUNT_name_CLIENT_[ID|SECRET] - _set_value direct "${type_name}" "${type_value}" - # export CLIENT_[ID|SECRET] - _set_value direct "${type}" "${type_value}" - - return 0 -} - -################################################### -# Check refresh token and ask if required -# Globals: 8 variables, 4 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REDIRECT_URI, TOKEN_URL, CONFIG, QUIET -# Functions - _set_value, _print_center, _update_config, _check_access_token -# Arguments: 1 -# ${1} = Account name ( optional - if not given, then just REFRESH_TOKEN var is used ) -# Result: read description & export REFRESH_TOKEN ACCOUNT_${account_name}_REFRESH_TOKEN -################################################### -_check_refresh_token() { - # bail out before doing anything if client id and secret is not present, unlikely to 
happen but just in case - [[ -z ${CLIENT_ID:+${CLIENT_SECRET}} ]] && return 1 - declare account_name="${1:-}" \ - refresh_token_regex='[0-9]//[0-9A-Za-z_-]+' authorization_code_regex='[0-9]/[0-9A-Za-z_-]+' - declare refresh_token_name="${account_name:+ACCOUNT_${account_name}_}REFRESH_TOKEN" check_error - - _set_value indirect refresh_token_value "${refresh_token_name}" - - [[ -n ${refresh_token_value} ]] && { - ! [[ ${refresh_token_value} =~ ${refresh_token_regex} ]] && - "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. " "-" && unset refresh_token_value - } - - [[ -z ${refresh_token_value} ]] && { - printf "\n" && "${QUIET:-_print_center}" "normal" "If you have a refresh token generated, then type the token, else leave blank and press return key.." " " - printf "\n" && "${QUIET:-_print_center}" "normal" " Refresh Token " "-" && printf -- "-> " - read -r refresh_token_value - if [[ -n ${refresh_token_value} ]]; then - "${QUIET:-_print_center}" "normal" " Checking refresh token.. " "-" - if [[ ${refresh_token_value} =~ ${refresh_token_regex} ]]; then - _set_value direct REFRESH_TOKEN "${refresh_token_value}" - { _check_access_token "${account_name}" skip_check && - _update_config "${refresh_token_name}" "${refresh_token_value}" "${CONFIG}" && - _clear_line 1; } || check_error=true - else - check_error=true - fi - [[ -n ${check_error} ]] && "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token given, follow below steps to generate.. " "-" && unset refresh_token_value - else - "${QUIET:-_print_center}" "normal" " No Refresh token given, follow below steps to generate.. 
" "-" && unset refresh_token_value - fi - - [[ -z ${refresh_token_value} ]] && { - printf "\n" && "${QUIET:-_print_center}" "normal" "Visit the below URL, tap on allow and then enter the code obtained" " " - URL="https://accounts.google.com/o/oauth2/auth?client_id=${CLIENT_ID}&redirect_uri=${REDIRECT_URI}&scope=${SCOPE}&response_type=code&prompt=consent" - printf "\n%s\n" "${URL}" - declare AUTHORIZATION_CODE authorization_code AUTHORIZATION_CODE_VALID response - until [[ -n ${AUTHORIZATION_CODE} && -n ${AUTHORIZATION_CODE_VALID} ]]; do - [[ -n ${AUTHORIZATION_CODE} ]] && { - if [[ ${AUTHORIZATION_CODE} =~ ${authorization_code_regex} ]]; then - AUTHORIZATION_CODE_VALID="true" && continue - else - "${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. " "-" && unset AUTHORIZATION_CODE authorization_code - fi - } - { [[ -z ${authorization_code} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter the authorization code " "-"; } || _clear_line 1 - printf -- "-> \033[?7l" - read -r AUTHORIZATION_CODE && authorization_code=1 - printf '\033[?7h' - done - response="$(curl --compressed "${CURL_PROGRESS}" -X POST \ - --data "code=${AUTHORIZATION_CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")" || : - _clear_line 1 1>&2 - - refresh_token_value="$(_json_value refresh_token 1 1 <<< "${response}")" || - { printf "%s\n" "Error: Cannot fetch refresh token, make sure the authorization code was correct." 
&& return 1; } - - _set_value direct REFRESH_TOKEN "${refresh_token_value}" - { _check_access_token "${account_name}" skip_check "${response}" && - _update_config "${refresh_token_name}" "${refresh_token_value}" "${CONFIG}"; } || return 1 - } - printf "\n" - } - - # export ACCOUNT_name_REFRESH_TOKEN - _set_value direct "${refresh_token_name}" "${refresh_token_value}" - # export REFRESH_TOKEN - _set_value direct REFRESH_TOKEN "${refresh_token_value}" - - return 0 -} - -################################################### -# Check access token and create/update if required -# Also update in config -# Globals: 9 variables, 3 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, TOKEN_URL, CONFIG, API_URL, API_VERSION, QUIET -# Functions - _print_center, _update_config, _set_value -# Arguments: 2 -# ${1} = Account name ( optional - if not given, then just ACCESS_TOKEN var is used ) -# ${2} = if skip_check, then force create access token, else check with regex and expiry -# ${3} = json response ( optional ) -# Result: read description & export ACCESS_TOKEN ACCESS_TOKEN_EXPIRY -################################################### -_check_access_token() { - # bail out before doing anything if client id|secret or refresh token is not present, unlikely to happen but just in case - [[ -z ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && return 1 - - declare account_name="${1:-}" no_check="${2:-false}" response_json="${3:-}" \ - token_name token_expiry_name token_value token_expiry_value response \ - access_token_regex='ya29\.[0-9A-Za-z_-]+' - declare token_name="${account_name:+ACCOUNT_${account_name}_}ACCESS_TOKEN" - declare token_expiry_name="${token_name}_EXPIRY" - - _set_value indirect token_value "${token_name}" - _set_value indirect token_expiry_value "${token_expiry_name}" - - [[ ${no_check} = skip_check || -z ${token_value} || ${token_expiry_value:-0} -lt "$(printf "%(%s)T\\n" "-1")" || ! 
${token_value} =~ ${access_token_regex} ]] && { - response="${response_json:-$(curl --compressed -s -X POST --data \ - "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || : - - if token_value="$(_json_value access_token 1 1 <<< "${response}")"; then - token_expiry_value="$(($(printf "%(%s)T\\n" "-1") + $(_json_value expires_in 1 1 <<< "${response}") - 1))" - _update_config "${token_name}" "${token_value}" "${CONFIG}" || return 1 - _update_config "${token_expiry_name}" "${token_expiry_value}" "${CONFIG}" || return 1 - else - "${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2 - printf "%s\n" "${response}" 1>&2 - return 1 - fi - } - - # export ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY - _set_value direct ACCESS_TOKEN "${token_value}" - _set_value direct ACCESS_TOKEN_EXPIRY "${token_expiry_value}" - - # export INITIAL_ACCESS_TOKEN which is used on script cleanup - _set_value direct INITIAL_ACCESS_TOKEN "${ACCESS_TOKEN}" - return 0 -} - -################################################### -# load config file if available, else create a empty file -# uses global variable CONFIG -################################################### -_reload_config() { - { [[ -r ${CONFIG} ]] && . 
"${CONFIG}"; } || { printf "" >> "${CONFIG}" || return 1; } - return 0 -} - -################################################### -# launch a background service to check access token and update it -# checks ACCESS_TOKEN_EXPIRY, try to update before 5 mins of expiry, a fresh token gets 60 mins -# process will be killed when script exits or "${MAIN_PID}" is killed -# Globals: 4 variables, 1 function -# Variables - ACCESS_TOKEN, ACCESS_TOKEN_EXPIRY, MAIN_PID, TMPFILE -# Functions - _check_access_token -# Arguments: None -# Result: read description & export ACCESS_TOKEN_SERVICE_PID -################################################### -_token_bg_service() { - [[ -z ${MAIN_PID} ]] && return 0 # don't start if MAIN_PID is empty - printf "%b\n" "ACCESS_TOKEN=\"${ACCESS_TOKEN}\"\nACCESS_TOKEN_EXPIRY=\"${ACCESS_TOKEN_EXPIRY}\"" >| "${TMPFILE}_ACCESS_TOKEN" - { - until ! kill -0 "${MAIN_PID}" 2>| /dev/null 1>&2; do - . "${TMPFILE}_ACCESS_TOKEN" - CURRENT_TIME="$(printf "%(%s)T\\n" "-1")" - REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))" - if [[ ${REMAINING_TOKEN_TIME} -le 300 ]]; then - # timeout after 30 seconds, it shouldn't take too long anyway, and update tmp config - CONFIG="${TMPFILE}_ACCESS_TOKEN" _timeout 30 _check_access_token "" skip_check || : - else - TOKEN_PROCESS_TIME_TO_SLEEP="$(if [[ ${REMAINING_TOKEN_TIME} -le 301 ]]; then - printf "0\n" - else - printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))" - fi)" - sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}" - fi - sleep 1 - done - } & - export ACCESS_TOKEN_SERVICE_PID="${!}" - return 0 -} - -export -f _account_name_valid \ - _account_exists \ - _all_accounts \ - _set_new_account_name \ - _delete_account \ - _handle_old_config \ - _check_credentials \ - _check_account_credentials \ - _check_client \ - _check_refresh_token \ - _check_access_token \ - _reload_config - -################################################### -# Search for an existing file on gdrive with write permission. 
-# Globals: 3 variables, 2 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value -# Arguments: 4 -# ${1} = file name -# ${2} = root dir id of file -# ${3} = mode ( size or md5Checksum or empty ) -# ${4} = if mode = empty, then not required -# mode = size, then size -# mode = md5Checksum, then md5sum -# Result: print search response if id fetched -# check size and md5sum if mode size or md5Checksum -# Reference: -# https://developers.google.com/drive/api/v3/search-files -################################################### -_check_existing_file() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare name="${1}" rootdir="${2}" mode="${3}" param_value="${4}" query search_response id - - "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2 - query="$(_url_encode "name=\"${name}\" and '${rootdir}' in parents and trashed=false")" - - search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id,name,mimeType${mode:+,${mode}})&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - _json_value id 1 1 <<< "${search_response}" 2>| /dev/null 1>&2 || return 1 - - [[ -n ${mode} ]] && { - [[ "$(_json_value "${mode}" 1 1 <<< "${search_response}")" = "${param_value}" ]] || return 1 - } - - printf "%s\n" "${search_response}" - return 0 -} - -################################################### -# Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 6 functions -# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _json_escape _bytes_to_human, _clear_line -# Arguments: 5 -# ${1} = update or upload ( upload type ) -# ${2} = file id to upload -# ${3} = root dir id for file -# ${4} = name of file -# ${5} = 
size of file -# ${6} = md5sum of file -# Result: On -# Success - Upload/Update file and export FILE_ID -# Error - return 1 -# Reference: -# https://developers.google.com/drive/api/v2/reference/files/copy -################################################### -_clone_file() { - [[ $# -lt 5 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare job="${1}" file_id="${2}" file_root_id="${3}" name="${4}" size="${5}" md5="${6}" - declare clone_file_post_data clone_file_response readable_size _file_id description escaped_name && STRING="Cloned" - escaped_name="$(_json_escape j "${name}")" print_name="$(_json_escape p "${name}")" readable_size="$(_bytes_to_human "${size}")" - - # create description data - [[ -n ${DESCRIPTION_FILE} ]] && { - : "${DESCRIPTION_FILE//%f/${name}}" && : "${_//%s/${readable_size}}" - description="$(_json_escape j "${_}")" # escape for json - } - - clone_file_post_data="{\"parents\": [\"${file_root_id}\"]${description:+,\"description\":\"${description}\"}}" - - _print_center "justify" "${print_name} " "| ${readable_size}" "=" - - if [[ ${job} = update ]]; then - declare file_check_json check_value_type check_value - case "${CHECK_MODE}" in - 2) check_value_type="size" check_value="${size}" ;; - 3) check_value_type="md5Checksum" check_value="${md5}" ;; - esac - # Check if file actually exists. - if file_check_json="$(_check_existing_file "${escaped_name}" "${file_root_id}")"; then - if [[ -n ${SKIP_DUPLICATES} ]]; then - _collect_file_info "${file_check_json}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_name}" " already exists." "=" && return 0 - else - _print_center "justify" "Overwriting file.." 
"-" - { _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" && - clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare")"; } || - { _error_logging_upload "${print_name}" "${post_data:-${file_check_json}}" || return 1; } - if [[ ${_file_id} != "${file_id}" ]]; then - _api_request -s \ - -X DELETE \ - "${API_URL}/drive/${API_VERSION}/files/${_file_id}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || : - STRING="Updated" - else - _collect_file_info "${file_check_json}" || return 1 - fi - fi - else - "${EXTRA_LOG}" "justify" "Cloning file.." "-" - fi - else - "${EXTRA_LOG}" "justify" "Cloning file.." "-" - fi - - # shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. - clone_file_response="$(_api_request ${CURL_PROGRESS} \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${clone_file_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" - for _ in 1 2 3; do _clear_line 1; done - _collect_file_info "${clone_file_response}" || return 1 - "${QUIET:-_print_center}" "justify" "${print_name} " "| ${readable_size} | ${STRING}" "=" - return 0 -} - -################################################### -# Create/Check directory in google drive. 
-# Globals: 3 variables, 3 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _json_escape -# Arguments: 2 -# ${1} = dir name -# ${2} = root dir id of given dir -# Result: print folder id -# Reference: -# https://developers.google.com/drive/api/v3/folder -################################################### -_create_directory() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare dirname="${1##*/}" escaped_dirname rootdir="${2}" query search_response folder_id - escaped_dirname="$(_json_escape j "${dirname}")" print_dirname="$(_json_escape p "${dirname}")" - - "${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${print_dirname}" "-" 1>&2 - query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname}\" and trashed=false and '${rootdir}' in parents")" - - search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - - if ! 
folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)"; then - declare create_folder_post_data create_folder_response - create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname}\",\"parents\": [\"${rootdir}\"]}" - create_folder_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${create_folder_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - fi - _clear_line 1 1>&2 - - { folder_id="${folder_id:-$(_json_value id 1 1 <<< "${create_folder_response}")}" && printf "%s\n" "${folder_id}"; } || - { printf "%s\n" "${create_folder_response}" 1>&2 && return 1; } - return 0 -} - -################################################### -# Get information for a gdrive folder/file. -# Globals: 3 variables, 1 function -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _json_value -# Arguments: 2 -# ${1} = folder/file gdrive id -# ${2} = information to fetch, e.g name, id -# Result: On -# Success - print fetched value -# Error - print "message" field from the json -# Reference: -# https://developers.google.com/drive/api/v3/search-files -################################################### -_drive_info() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare folder_id="${1}" fetch="${2}" search_response - - "${EXTRA_LOG}" "justify" "Fetching info.." "-" 1>&2 - search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=${fetch}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - printf "%b" "${search_response:+${search_response}\n}" - return 0 -} - -################################################### -# Extract ID from a googledrive folder/file url. 
-# Globals: None -# Arguments: 1 -# ${1} = googledrive folder/file url. -# Result: print extracted ID -################################################### -_extract_id() { - [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare LC_ALL=C ID="${1}" - case "${ID}" in - *'drive.google.com'*'id='*) ID="${ID##*id=}" && ID="${ID%%\?*}" && ID="${ID%%\&*}" ;; - *'drive.google.com'*'file/d/'* | 'http'*'docs.google.com'*'/d/'*) ID="${ID##*\/d\/}" && ID="${ID%%\/*}" && ID="${ID%%\?*}" && ID="${ID%%\&*}" ;; - *'drive.google.com'*'drive'*'folders'*) ID="${ID##*\/folders\/}" && ID="${ID%%\?*}" && ID="${ID%%\&*}" ;; - esac - printf "%b" "${ID:+${ID}\n}" -} - -################################################### -# Upload ( Create/Update ) files on gdrive. -# Interrupted uploads can be resumed. -# Globals: 8 variables, 11 functions -# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _json_escape _print_center, _bytes_to_human, _check_existing_file -# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session -# _full_upload, _collect_file_info -# Arguments: 3 -# ${1} = update or upload ( upload type ) -# ${2} = file to upload -# ${3} = root dir id for file -# Result: On -# Success - Upload/Update file and export FILE_ID -# Error - return 1 -# Reference: -# https://developers.google.com/drive/api/v3/create-file -# https://developers.google.com/drive/api/v3/manage-uploads -# https://developers.google.com/drive/api/v3/reference/files/update -################################################### -_upload_file() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare job="${1}" input="${2}" folder_id="${3}" \ - slug escaped_slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type description \ - resume_args1 
resume_args2 resume_args3 - - slug="${input##*/}" escaped_slug="$(_json_escape j "${slug}")" print_slug="$(_json_escape p "${slug}")" - inputname="${slug%.*}" - extension="${slug##*.}" - inputsize="$(($(wc -c < "${input}")))" && content_length="${inputsize}" - readable_size="$(_bytes_to_human "${inputsize}")" - - # Handle extension-less files - [[ ${inputname} = "${extension}" ]] && declare mime_type && { - mime_type="$(file --brief --mime-type "${input}" || mimetype --output-format %m "${input}")" 2>| /dev/null || { - "${QUIET:-_print_center}" "justify" "Error: file or mimetype command not found." "=" && printf "\n" - exit 1 - } - } - - # create description data - [[ -n ${DESCRIPTION_FILE} ]] && { - : "${DESCRIPTION_FILE//%f/${slug}}" && : "${_//%s/${inputsize}}" && : "${_//%m/${mime_type}}" - description="$(_json_escape j "${_}")" # escape for json - } - - _print_center "justify" "${print_slug}" " | ${readable_size}" "=" - - # Set proper variables for overwriting files - [[ ${job} = update ]] && { - declare file_check_json check_value - case "${CHECK_MODE}" in - 2) check_value_type="size" check_value="${inputsize}" ;; - 3) - check_value_type="md5Checksum" - check_value="$(md5sum "${input}")" || { - "${QUIET:-_print_center}" "justify" "Error: cannot calculate md5sum of given file." "=" 1>&2 - return 1 - } - check_value="${check_value%% *}" - ;; - esac - # Check if file actually exists, and create if not. - if file_check_json="$(_check_existing_file "${escaped_slug}" "${folder_id}" "${check_value_type}" "${check_value}")"; then - if [[ -n ${SKIP_DUPLICATES} ]]; then - # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json}" "${escaped_slug}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_slug}" " already exists." 
"=" && return 0 - else - request_method="PATCH" - _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" || - { _error_logging_upload "${print_slug}" "${file_check_json}" || return 1; } - url="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" - # JSON post data to specify the file name and folder under while the file to be updated - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"addParents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" - STRING="Updated" - fi - else - job="create" - fi - } - - # Set proper variables for creating files - [[ ${job} = create ]] && { - url="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" - request_method="POST" - # JSON post data to specify the file name and folder under while the file to be created - postdata="{\"mimeType\": \"${mime_type}\",\"name\": \"${escaped_slug}\",\"parents\": [\"${folder_id}\"]${description:+,\"description\":\"${description}\"}}" - STRING="Uploaded" - } - - __file="${HOME}/.google-drive-upload/${print_slug}__::__${folder_id}__::__${inputsize}" - # https://developers.google.com/drive/api/v3/manage-uploads - if [[ -r "${__file}" ]]; then - uploadlink="$(< "${__file}")" - http_code="$(curl --compressed -s -X PUT "${uploadlink}" -o /dev/null --write-out %"{http_code}")" || : - case "${http_code}" in - 308) # Active Resumable URI give 308 status - uploaded_range="$(: "$(curl --compressed -s -X PUT \ - -H "Content-Range: bytes */${inputsize}" \ - --url "${uploadlink}" --globoff -D - || :)" && - : "$(printf "%s\n" "${_/*[R,r]ange: bytes=0-/}")" && read -r firstline <<< "$_" && printf "%s\n" "${firstline//$'\r'/}")" - if [[ ${uploaded_range} -gt 0 ]]; then - _print_center "justify" "Resuming interrupted upload.." 
"-" && _newline "\n" - content_range="$(printf "bytes %s-%s/%s\n" "$((uploaded_range + 1))" "$((inputsize - 1))" "${inputsize}")" - content_length="$((inputsize - $((uploaded_range + 1))))" - # Resuming interrupted uploads needs http1.1 - resume_args1='-s' resume_args2='--http1.1' resume_args3="Content-Range: ${content_range}" - _upload_file_from_uri _clear_line - _collect_file_info "${upload_body}" "${print_slug}" || return 1 - _normal_logging_upload - _remove_upload_session - else - _full_upload || return 1 - fi - ;; - 201 | 200) # Completed Resumable URI give 20* status - upload_body="${http_code}" - _collect_file_info "${upload_body}" "${print_slug}" || return 1 - _normal_logging_upload - _remove_upload_session - ;; - 4[0-9][0-9] | 000 | *) # Dead Resumable URI give 40* status - _full_upload || return 1 - ;; - esac - else - _full_upload || return 1 - fi - return 0 -} - -################################################### -# Sub functions for _upload_file function - Start -# generate resumable upload link -_generate_upload_link() { - "${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2 - uploadlink="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X "${request_method}" \ - -H "Content-Type: application/json; charset=UTF-8" \ - -H "X-Upload-Content-Type: ${mime_type}" \ - -H "X-Upload-Content-Length: ${inputsize}" \ - -d "$postdata" \ - "${url}" \ - -D - || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - case "${uploadlink}" in - *'ocation: '*'upload_id'*) uploadlink="$(read -r firstline <<< "${uploadlink/*[L,l]ocation: /}" && printf "%s\n" "${firstline//$'\r'/}")" && return 0 ;; - '' | *) return 1 ;; - esac - - return 0 -} - -# Curl command to push the file to google drive. -_upload_file_from_uri() { - _print_center "justify" "Uploading.." "-" - # shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. 
- upload_body="$(_api_request ${CURL_PROGRESS} \ - -X PUT \ - -H "Content-Type: ${mime_type}" \ - -H "Content-Length: ${content_length}" \ - -H "Slug: ${print_slug}" \ - -T "${input}" \ - -o- \ - --url "${uploadlink}" \ - --globoff \ - ${CURL_SPEED} ${resume_args1} ${resume_args2} \ - -H "${resume_args3}" || :)" - [[ -z ${VERBOSE_PROGRESS} ]] && for _ in 1 2; do _clear_line 1; done && "${1:-:}" - return 0 -} - -# logging in case of successful upload -_normal_logging_upload() { - [[ -z ${VERBOSE_PROGRESS} ]] && _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_slug} " "| ${readable_size} | ${STRING}" "=" - return 0 -} - -# Tempfile Used for resuming interrupted uploads -_log_upload_session() { - [[ ${inputsize} -gt 1000000 ]] && printf "%s\n" "${uploadlink}" >| "${__file}" - return 0 -} - -# remove upload session -_remove_upload_session() { - rm -f "${__file}" - return 0 -} - -# wrapper to fully upload a file from scratch -_full_upload() { - _generate_upload_link || { _error_logging_upload "${print_slug}" "${uploadlink}" || return 1; } - _log_upload_session - _upload_file_from_uri - _collect_file_info "${upload_body}" "${print_slug}" || return 1 - _normal_logging_upload - _remove_upload_session - return 0 -} -# Sub functions for _upload_file function - End -################################################### - -################################################### -# Share a gdrive file/folder -# Globals: 3 variables, 4 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _print_center, _clear_line -# Arguments: 2 -# ${1} = gdrive ID of folder/file -# ${2} = Email to which file will be shared ( optional ) -# Result: read description -# Reference: -# https://developers.google.com/drive/api/v3/manage-sharing -################################################### -_share_id() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare id="${1}" role="${2:?Missing role}" 
share_email="${3}" - declare type="${share_email:+user}" share_post_data share_post_data share_response - - "${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2 - share_post_data="{\"role\":\"${role}\",\"type\":\"${type:-anyone}\"${share_email:+,\"emailAddress\":\"${share_email}\"}}" - - share_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${share_post_data}" \ - "${API_URL}/drive/${API_VERSION}/files/${id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - { _json_value id 1 1 <<< "${share_response}" 2>| /dev/null 1>&2 && return 0; } || - { printf "%s\n" "Error: Cannot Share." 1>&2 && printf "%s\n" "${share_response}" 1>&2 && return 1; } -} - -export -f _check_existing_file \ - _clone_file \ - _create_directory \ - _drive_info \ - _extract_id \ - _upload_file \ - _generate_upload_link \ - _upload_file_from_uri \ - _normal_logging_upload \ - _log_upload_session \ - _remove_upload_session \ - _full_upload \ - _share_id -# shellcheck source=/dev/null - -################################################### -# A simple wrapper to check tempfile for access token and make authorized oauth requests to drive api -################################################### -_api_request() { - . 
"${TMPFILE}_ACCESS_TOKEN" - - curl --compressed \ - -H "Authorization: Bearer ${ACCESS_TOKEN}" \ - "${@}" -} - -################################################### -# Used in collecting file properties from output json after a file has been uploaded/cloned -# Also handles logging in log file if LOG_FILE_ID is set -# Globals: 1 variables, 2 functions -# Variables - LOG_FILE_ID -# Functions - _error_logging_upload, _json_value -# Arguments: 1 -# ${1} = output jsom -# Result: set fileid and link, save info to log file if required -################################################### -_collect_file_info() { - declare json="${1}" info - FILE_ID="$(_json_value id 1 1 <<< "${json}")" || { _error_logging_upload "${2}" "${json}" || return 1; } - [[ -z ${LOG_FILE_ID} || -d ${LOG_FILE_ID} ]] && return 0 - info="Link: https://drive.google.com/open?id=${FILE_ID} -Name: $(_json_value name 1 1 <<< "${json}" || :) -ID: ${FILE_ID} -Type: $(_json_value mimeType 1 1 <<< "${json}" || :)" - printf "%s\n\n" "${info}" >> "${LOG_FILE_ID}" - return 0 -} - -################################################### -# Error logging wrapper -################################################### -_error_logging_upload() { - declare log="${2}" - "${QUIET:-_print_center}" "justify" "Upload ERROR" ", ${1:-} not ${STRING:-uploaded}." "=" 1>&2 - case "${log}" in - # https://github.com/rclone/rclone/issues/3857#issuecomment-573413789 - *'"message": "User rate limit exceeded."'*) - printf "%s\n\n%s\n" "${log}" \ - "Today's upload limit reached for this account. Use another account to upload or wait for tomorrow." 
1>&2 - # Never retry if upload limit reached - export RETRY=0 - ;; - '' | *) printf "%s\n" "${log}" 1>&2 ;; - esac - printf "\n\n\n" 1>&2 - return 1 -} - -################################################### -# A small function to get rootdir id for files in sub folder uploads -# Globals: 1 variable, 1 function -# Variables - DIRIDS -# Functions - _dirname -# Arguments: 1 -# ${1} = filename -# Result: read discription -################################################### -_get_rootdir_id() { - declare file="${1:?Error: give filename}" __rootdir __temp - __rootdir="$(_dirname "${file}")" - __temp="$(grep -F "|:_//_:|${__rootdir}|:_//_:|" <<< "${DIRIDS:?Error: DIRIDS Missing}" || :)" - printf "%s\n" "${__temp%%"|:_//_:|${__rootdir}|:_//_:|"}" - return 0 -} - -################################################### -# A extra wrapper for _upload_file function to properly handle retries -# also handle uploads in case uploading from folder -# Globals: 2 variables, 1 function -# Variables - RETRY, UPLOAD_MODE -# Functions - _upload_file -# Arguments: 3 -# ${1} = parse or norparse -# ${2} = file path -# ${3} = if ${1} != parse; gdrive folder id to upload; fi -# Result: set SUCCESS var on success -################################################### -_upload_file_main() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare file="${2}" dirid _sleep - { [[ ${1} = parse ]] && dirid="$(_get_rootdir_id "${file}")"; } || dirid="${3}" - - retry="${RETRY:-0}" && unset RETURN_STATUS - until [[ ${retry} -le 0 ]] && [[ -n ${RETURN_STATUS} ]]; do - if [[ -n ${4} ]]; then - { _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break; } || RETURN_STATUS=2 - else - { _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" && RETURN_STATUS=1 && break; } || RETURN_STATUS=2 - fi - # decrease retry using -=, skip sleep if all retries done - [[ $((retry -= 1)) -lt 1 ]] && sleep "$((_sleep += 1))" - # on every 
retry, sleep the times of retry it is, e.g for 1st, sleep 1, for 2nd, sleep 2 - continue - done - [[ -n ${4} ]] && { - { [[ ${RETURN_STATUS} = 1 ]] && printf "%s\n" "${file}"; } || printf "%s\n" "${file}" 1>&2 - } - return 0 -} - -################################################### -# Upload all files in the given folder, parallelly or non-parallely and show progress -# Globals: 7 variables, 3 functions -# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET -# Functions - _clear_line, _newline, _print_center and _upload_file_main -# Arguments: 4 -# ${1} = parallel or normal -# ${2} = parse or norparse -# ${3} = filenames with full path -# ${4} = if ${2} != parse; then gdrive folder id to upload; fi -# Result: read discription, set SUCCESS_STATUS & ERROR_STATUS -################################################### -_upload_folder() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare mode="${1}" files="${3}" && PARSE_MODE="${2}" ID="${4:-}" && export PARSE_MODE ID - SUCCESS_STATUS=0 SUCCESS_FILES="" ERROR_STATUS=0 ERROR_FILES="" - case "${mode}" in - normal) - [[ ${PARSE_MODE} = parse ]] && _clear_line 1 && _newline "\n" - - while read -u 4 -r file; do - _upload_file_main "${PARSE_MODE}" "${file}" "${ID}" - { [[ ${RETURN_STATUS} = 1 ]] && : "$((SUCCESS_STATUS += 1))" && SUCCESS_FILES+="${file}"$'\n'; } || - { : "$((ERROR_STATUS += 1))" && ERROR_FILES+="${file}"$'\n'; } - if [[ -n ${VERBOSE:-${VERBOSE_PROGRESS}} ]]; then - _print_center "justify" "Status: ${SUCCESS_STATUS} Uploaded" " | ${ERROR_STATUS} Failed" "=" && _newline "\n" - else - for _ in 1 2; do _clear_line 1; done - _print_center "justify" "Status: ${SUCCESS_STATUS} Uploaded" " | ${ERROR_STATUS} Failed" "=" - fi - done 4<<< "${files}" - ;; - parallel) - NO_OF_PARALLEL_JOBS_FINAL="$((NO_OF_PARALLEL_JOBS > NO_OF_FILES ? 
NO_OF_FILES : NO_OF_PARALLEL_JOBS))" - [[ -f "${TMPFILE}"SUCCESS ]] && rm "${TMPFILE}"SUCCESS - [[ -f "${TMPFILE}"ERROR ]] && rm "${TMPFILE}"ERROR - - # shellcheck disable=SC2016 - printf "%s\n" "${files}" | xargs -P"${NO_OF_PARALLEL_JOBS_FINAL}" -I "{}" -n 1 bash -c ' - _upload_file_main "${PARSE_MODE}" "{}" "${ID}" true - ' 1>| "${TMPFILE}"SUCCESS 2>| "${TMPFILE}"ERROR & - pid="${!}" - - until [[ -f "${TMPFILE}"SUCCESS ]] || [[ -f "${TMPFILE}"ERORR ]]; do sleep 0.5; done - [[ ${PARSE_MODE} = parse ]] && _clear_line 1 - _newline "\n" - - until ! kill -0 "${pid}" 2>| /dev/null 1>&2; do - SUCCESS_STATUS="$(_count < "${TMPFILE}"SUCCESS)" - ERROR_STATUS="$(_count < "${TMPFILE}"ERROR)" - sleep 1 - [[ $((SUCCESS_STATUS + ERROR_STATUS)) != "${TOTAL}" ]] && - _clear_line 1 && "${QUIET:-_print_center}" "justify" "Status" ": ${SUCCESS_STATUS} Uploaded | ${ERROR_STATUS} Failed" "=" - TOTAL="$((SUCCESS_STATUS + ERROR_STATUS))" - done - SUCCESS_STATUS="$(_count < "${TMPFILE}"SUCCESS)" SUCCESS_FILES="$(< "${TMPFILE}"SUCCESS)" - ERROR_STATUS="$(_count < "${TMPFILE}"ERROR)" ERROR_FILES="$(< "${TMPFILE}"ERROR)" - ;; - esac - return 0 -} - -export -f _api_request \ - _collect_file_info \ - _error_logging_upload \ - _get_rootdir_id \ - _upload_file_main \ - _upload_folder -# Upload a file to Google Drive -# shellcheck source=/dev/null - -_usage() { - printf "%b" " -The script can be used to upload file/directory to google drive.\n -Usage:\n ${0##*/} [options.. ] \n -Foldername argument is optional. 
If not provided, the file will be uploaded to preconfigured google drive.\n -File name argument is optional if create directory option is used.\n -Options:\n - -a | --account 'account name' - Use different account than the default one.\n - To change the default account name, use this format, -a/--account default=account_name\n - -la | --list-accounts - Print all configured accounts in the config files.\n - -ca | --create-account 'account name' - To create a new account with the given name if does not already exists.\n - -da | --delete-account 'account name' - To delete an account information from config file. \n - -c | -C | --create-dir - option to create directory. Will provide folder id. Can be used to provide input folder, see README.\n - -r | --root-dir or - google folder ID/URL to which the file/directory is going to upload. - If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url\n - -s | --skip-subdirs - Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads.\n - -p | --parallel - Upload multiple files in parallel, Max value = 10.\n - -f | --[file|folder] - Specify files and folders explicitly in one command, use multiple times for multiple folder/files. See README for more use of this command.\n - -cl | --clone - Upload a gdrive file without downloading, require accessible gdrive link or id as argument.\n - -o | --overwrite - Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders.\n - -d | --skip-duplicates - Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders.\n - -cm | --check-mode - Additional flag for --overwrite and --skip-duplicates flag. 
Can be used to change check mode in those flags, available args are 'size' and 'md5'.\n - -desc | --description | --description-all - Specify description for the given file. To use the respective metadata of a file, below is the format:\n - File name ( fullname ): %f | Size: %s | Mime Type: %m\n - Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m'\n - Note: For files inside folders, use --description-all flag.\n - -S | --share - Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link.\n - -SM | -sm | --share-mode 'share mode' - Specify the share mode for sharing file.\n - Share modes are: r / reader - Read only permission.\n - : w / writer - Read and write permission.\n - : c / commenter - Comment only permission.\n - Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is neccessary.\n - --speed 'speed' - Limit the download speed, supported formats: 1K, 1M and 1G.\n - -i | --save-info - Save uploaded files info to the given filename.\n - -z | --config - Override default config file with custom config file.\nIf you want to change default value, then use this format -z/--config default=default=your_config_file_path.\n - -q | --quiet - Supress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders.\n - -R | --retry 'num of retries' - Retry the file upload if it fails, postive integer as argument. Currently only for file uploads.\n - -in | --include 'pattern' - Only include the files with the given pattern to upload - Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n - -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading. 
- Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --exclude "*1*", will exclude all the files pattern '1' in the name.\n - --hide - This flag will prevent the script to print sensitive information like root folder id or drivelink.\n - -v | --verbose - Display detailed message (only for non-parallel uploads).\n - -V | --verbose-progress - Display detailed message and detailed upload progress(only for non-parallel uploads).\n - --skip-internet-check - Do not check for internet connection, recommended to use in sync jobs. - $([[ ${GUPLOAD_INSTALLED_WITH} = script ]] && printf '%s\n' '\n -u | --update - Update the installed script in your system.\n - -U | --uninstall - Uninstall script, remove related files.\n') - --info - Show detailed info, only if script is installed system wide.\n - -D | --debug - Display script command trace.\n - -h | --help - Display this message.\n" - exit 0 -} - -_short_help() { - printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit 0 -} - -################################################### -# Print info if installed -# Globals: 7 variable -# COMMAND_NAME REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA -# Arguments: None -# Result: read description -################################################### -_version_info() { - if command -v "${COMMAND_NAME}" 1> /dev/null && [[ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]]; then - for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do - printf "%s\n" "${i}=\"${!i}\"" - done | sed -e "s/=/: /g" - else - printf "%s\n" "google-drive-upload is not installed system wide." 
- fi - exit 0 -} - -################################################### -# Function to cleanup config file -# Remove invalid access tokens on the basis of corresponding expiry -# Globals: None -# Arguments: 1 -# ${1} = config file -# Result: read description -################################################### -_cleanup_config() { - declare config="${1:?Error: Missing config}" values_regex - - ! [ -f "${config}" ] && return 0 - - while read -r line && [[ -n ${line} ]]; do - expiry_value_name="${line%%=*}" - token_value_name="${expiry_value_name%%_EXPIRY}" - - : "${line##*=}" && : "${_%\"}" && expiry="${_#\"}" - [[ ${expiry} -le "$(printf "%(%s)T\\n" "-1")" ]] && - values_regex="${values_regex:+${values_regex}|}${expiry_value_name}=\".*\"|${token_value_name}=\".*\"" - - done <<< "$(grep -F ACCESS_TOKEN_EXPIRY "${config}" || :)" - - chmod u+w "${config}" && - printf "%s\n" "$(grep -Ev "^\$${values_regex:+|${values_regex}}" "${config}")" >| "${config}" && - chmod "a-w-r-x,u+r" "${config}" - return 0 -} - -################################################### -# Process all arguments given to the script -# Globals: 2 variable, 1 function -# Variable - HOME, CONFIG -# Functions - _short_help -# Arguments: Many -# ${@} = Flags with argument and file/folder input -# Result: On -# Success - Set all the variables -# Error - Print error message and exit -# Reference: -# Email Regex - https://gist.github.com/guessi/82a73ee7eb2b1216eb9db17bb8d65dd1 -################################################### -_setup_arguments() { - [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - # Internal variables - # De-initialize if any variables set already. 
- unset LIST_ACCOUNTS UPDATE_DEFAULT_ACCOUNT CUSTOM_ACCOUNT_NAME NEW_ACCOUNT_NAME DELETE_ACCOUNT_NAME ACCOUNT_ONLY_RUN - unset FOLDERNAME LOCAL_INPUT_ARRAY ID_INPUT_ARRAY CONTINUE_WITH_NO_INPUT - unset PARALLEL NO_OF_PARALLEL_JOBS SHARE SHARE_ROLE SHARE_EMAIL OVERWRITE SKIP_DUPLICATES CHECK_MODE DESCRIPTION SKIP_SUBDIRS ROOTDIR QUIET - unset VERBOSE VERBOSE_PROGRESS DEBUG LOG_FILE_ID CURL_SPEED RETRY - export CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s" - INFO_PATH="${HOME}/.google-drive-upload" CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath" - [[ -f ${CONFIG_INFO} ]] && . "${CONFIG_INFO}" - CONFIG="${CONFIG:-${HOME}/.googledrive.conf}" - - # Configuration variables # Remote gDrive variables - unset ROOT_FOLDER ROOT_FOLDER_NAME CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ACCESS_TOKEN - export API_URL="https://www.googleapis.com" - export API_VERSION="v3" \ - SCOPE="${API_URL}/auth/drive" \ - REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ - TOKEN_URL="https://accounts.google.com/o/oauth2/token" - - _check_config() { - [[ ${1} = default* ]] && export UPDATE_DEFAULT_CONFIG="_update_config" - { [[ -r ${2} ]] && CONFIG="${2}"; } || { - printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1 - } - return 0 - } - - _check_longoptions() { - [[ -z ${2} ]] && - printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && - exit 1 - return 0 - } - - while [[ $# -gt 0 ]]; do - case "${1}" in - -h | --help) _usage ;; - -D | --debug) DEBUG="true" && export DEBUG ;; - --info) _version_info ;; - -a | --account) - _check_longoptions "${1}" "${2}" - export CUSTOM_ACCOUNT_NAME="${2/default=/}" && shift - [[ ${2} = default* ]] && export UPDATE_DEFAULT_ACCOUNT="_update_config" - ;; - -la | --list-account) export LIST_ACCOUNTS="true" ;; - # this flag is preferred over --account - -ca | --create-account) - _check_longoptions "${1}" "${2}" - export NEW_ACCOUNT_NAME="${2}" && 
shift - ;; - -da | --delete-account) - _check_longoptions "${1}" "${2}" - export DELETE_ACCOUNT_NAME="${2}" && shift - ;; - -c | -C | --create-dir) - _check_longoptions "${1}" "${2}" - FOLDERNAME="${2}" && shift - ;; - -r | --root-dir) - _check_longoptions "${1}" "${2}" - ROOTDIR="${2/default=/}" - [[ ${2} = default* ]] && UPDATE_DEFAULT_ROOTDIR="_update_config" - shift - ;; - -z | --config) - _check_longoptions "${1}" "${2}" - _check_config "${2}" "${2/default=/}" - shift - ;; - -i | --save-info) - _check_longoptions "${1}" "${2}" - export LOG_FILE_ID="${2}" && shift - ;; - -s | --skip-subdirs) export SKIP_SUBDIRS="true" ;; - -p | --parallel) - _check_longoptions "${1}" "${2}" - NO_OF_PARALLEL_JOBS="${2}" - if [[ ${2} -gt 0 ]]; then - NO_OF_PARALLEL_JOBS="$((NO_OF_PARALLEL_JOBS > 10 ? 10 : NO_OF_PARALLEL_JOBS))" - else - printf "\nError: -p/--parallel value ranges between 1 to 10.\n" - exit 1 - fi - export PARALLEL_UPLOAD="parallel" && shift - ;; - -o | --overwrite) export OVERWRITE="Overwrite" UPLOAD_MODE="update" ;; - -d | --skip-duplicates) export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update" ;; - -cm | --check-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - size) export CHECK_MODE="2" && shift ;; - md5) export CHECK_MODE="3" && shift ;; - *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; - esac - ;; - -desc | --description | --description-all) - _check_longoptions "${1}" "${2}" - [[ ${1} = "--description-all" ]] && export DESCRIPTION_ALL="true" - export DESCRIPTION="${2}" && shift - ;; - -f | --file | --folder) - _check_longoptions "${1}" "${2}" - LOCAL_INPUT_ARRAY+=("${2}") && shift - ;; - -cl | --clone) - _check_longoptions "${1}" "${2}" - FINAL_ID_INPUT_ARRAY+=("$(_extract_id "${2}")") && shift - ;; - -S | --share) - SHARE="_share_id" - EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" - if [[ -n ${2} && ! 
${2} = -* && ${2} =~ ${EMAIL_REGEX} ]]; then - SHARE_EMAIL="${2}" && shift && export SHARE_EMAIL - fi - SHARE_ROLE="${SHARE_ROLE:-reader}" - ;; - -[Ss][Mm] | --share-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - r | read*) SHARE_ROLE="reader" ;; - w | write*) SHARE_ROLE="writer" ;; - c | comment*) SHARE_ROLE="commenter" ;; - *) - printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." && - exit 1 - ;; - esac - SHARE="_share_id" - shift - ;; - --speed) - _check_longoptions "${1}" "${2}" - regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$' - if [[ ${2} =~ ${regex} ]]; then - export CURL_SPEED="--limit-rate ${2}" && shift - else - printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2 - exit 1 - fi - ;; - -R | --retry) - _check_longoptions "${1}" "${2}" - if [[ ${2} -gt 0 ]]; then - export RETRY="${2}" && shift - else - printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n" - exit 1 - fi - ;; - -in | --include) - _check_longoptions "${1}" "${2}" - INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && shift - ;; - -ex | --exclude) - _check_longoptions "${1}" "${2}" - EXCLUDE_FILES="${EXCLUDE_FILES} ! 
-name '${2}' " && shift - ;; - --hide) HIDE_INFO=":" ;; - -q | --quiet) export QUIET="_print_center_quiet" ;; - -v | --verbose) export VERBOSE="true" ;; - -V | --verbose-progress) export VERBOSE_PROGRESS="true" ;; - --skip-internet-check) SKIP_INTERNET_CHECK=":" ;; - '') shorthelp ;; - *) # Check if user meant it to be a flag - if [[ ${1} = -* ]]; then - [[ ${GUPLOAD_INSTALLED_WITH} = script ]] && { - case "${1}" in - -u | --update) - _check_debug && _update && { exit 0 || exit 1; } - ;; - --uninstall) - _check_debug && _update uninstall && { exit 0 || exit 1; } - ;; - esac - } - printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 - else - if [[ ${1} =~ (drive.google.com|docs.google.com) ]]; then - FINAL_ID_INPUT_ARRAY+=("$(_extract_id "${1}")") - else - # If no "-" is detected in 1st arg, it adds to input - LOCAL_INPUT_ARRAY+=("${1}") - fi - fi - ;; - esac - shift - done - - _check_debug - - [[ -n ${VERBOSE_PROGRESS} ]] && unset VERBOSE && export CURL_PROGRESS="" - [[ -n ${QUIET} ]] && export CURL_PROGRESS="-s" - - # create info path folder, can be missing if gupload was not installed with install.sh - mkdir -p "${INFO_PATH}" - - unset Aseen && declare -A Aseen - for input in "${LOCAL_INPUT_ARRAY[@]}"; do - { [[ ${Aseen[${input}]} ]] && continue; } || Aseen[${input}]=x - { [[ -r ${input} ]] && FINAL_LOCAL_INPUT_ARRAY+=("${input}"); } || { - { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid Input - ${input} ]" "=" && printf "\n"; } 1>&2 - continue - } - done - - # handle account related flags here as we want to use the flags independenlty even with no normal valid inputs - # delete account, --delete-account flag - # TODO: add support for deleting multiple accounts - [[ -n ${DELETE_ACCOUNT_NAME} ]] && _delete_account "${DELETE_ACCOUNT_NAME}" - # list all configured accounts, --list-accounts flag - [[ -n ${LIST_ACCOUNTS} ]] && _all_accounts - - # If no input, then check if either -C option was 
used. - [[ -z ${FINAL_LOCAL_INPUT_ARRAY[*]:-${FINAL_ID_INPUT_ARRAY[*]:-${FOLDERNAME:-}}} ]] && { - # if any account related option was used then don't show short help - [[ -z ${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-${NEW_ACCOUNT_NAME}}} ]] && _short_help - # exit right away if --list-accounts or --delete-account flag was used - [[ -n ${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-}} ]] && exit 0 - # don't exit right away when new account is created but also let the rootdir stuff execute - [[ -n ${NEW_ACCOUNT_NAME} ]] && CONTINUE_WITH_NO_INPUT="true" - } - - # set CHECK_MODE if empty, below are check mode values - # 1 = check only name, 2 = check name and size, 3 = check name and md5sum - [[ -z ${CHECK_MODE} ]] && { - case "${SKIP_DUPLICATES:-${OVERWRITE}}" in - "Overwrite") export CHECK_MODE="1" ;; - "Skip Existing") export CHECK_MODE="2" ;; - esac - } - - return 0 -} - -################################################### -# Setup root directory where all file/folders will be uploaded/updated -# Globals: 5 variables, 6 functions -# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET -# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value, _set_value -# Arguments: None -# Result: read description -# If root id not found then print message and exit -# Update config with root id and root id name if specified -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#use-read-as-an-alternative-to-the-sleep-command -################################################### -_setup_root_dir() { - _check_root_id() { - declare json rootid - json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")" - if ! rootid="$(_json_value id 1 1 <<< "${json}")"; then - { [[ ${json} =~ "File not found" ]] && "${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." 
"=" 1>&2; } || { - printf "%s\n" "${json}" 1>&2 - } - return 1 - fi - ROOT_FOLDER="${rootid}" - "${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "${ROOT_FOLDER}" "${CONFIG}" || return 1 - return 0 - } - _check_root_id_name() { - ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name || :)" - "${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" "${ROOT_FOLDER_NAME}" "${CONFIG}" || return 1 - return 0 - } - - _set_value indirect ROOT_FOLDER "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" - _set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" - - if [[ -n ${ROOTDIR:-} ]]; then - ROOT_FOLDER="${ROOTDIR}" && { _check_root_id "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } && unset ROOT_FOLDER_NAME - elif [[ -z ${ROOT_FOLDER} ]]; then - { [[ -t 1 ]] && "${QUIET:-_print_center}" "normal" "Enter root folder ID or URL, press enter for default ( root )" " " && printf -- "-> " && - read -r ROOT_FOLDER && [[ -n ${ROOT_FOLDER} ]] && { _check_root_id _update_config || return 1; }; } || { - ROOT_FOLDER="root" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "${ROOT_FOLDER}" "${CONFIG}" || return 1 - } - elif [[ -z ${ROOT_FOLDER_NAME} ]]; then - _check_root_id_name _update_config || return 1 # update default root folder name if not available - fi - - # fetch root folder name if rootdir different than default - [[ -z ${ROOT_FOLDER_NAME} ]] && { _check_root_id_name "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } - - return 0 -} - -################################################### -# Setup Workspace folder -# Check if the given folder exists in google drive. -# If not then the folder is created in google drive under the configured root folder. 
-# Globals: 2 variables, 3 functions -# Variables - FOLDERNAME, ROOT_FOLDER -# Functions - _create_directory, _drive_info, _json_value -# Arguments: None -# Result: Read Description -################################################### -_setup_workspace() { - if [[ -z ${FOLDERNAME} ]]; then - WORKSPACE_FOLDER_ID="${ROOT_FOLDER}" - WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}" - else - WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" || - { printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; } - WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" || - { printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; } - fi - return 0 -} - -################################################### -# Process all the values in "${FINAL_LOCAL_INPUT_ARRAY[@]}" & "${FINAL_ID_INPUT_ARRAY[@]}" -# Globals: 22 variables, 17 functions -# Variables - FINAL_LOCAL_INPUT_ARRAY ( array ), ACCESS_TOKEN, VERBOSE, VERBOSE_PROGRESS -# WORKSPACE_FOLDER_ID, UPLOAD_MODE, SKIP_DUPLICATES, OVERWRITE, SHARE, -# UPLOAD_STATUS, COLUMNS, API_URL, API_VERSION, TOKEN_URL, LOG_FILE_ID -# FILE_ID, FILE_LINK, FINAL_ID_INPUT_ARRAY ( array ) -# PARALLEL_UPLOAD, QUIET, NO_OF_PARALLEL_JOBS, TMPFILE, SHARE_ROLE -# Functions - _print_center, _clear_line, _newline, _support_ansi_escapes, _print_center_quiet -# _upload_file, _share_id, _is_terminal, _dirname, -# _create_directory, _json_value, _url_encode, _check_existing_file, _bytes_to_human -# _clone_file, _get_access_token_and_update, _get_rootdir_id -# Arguments: None -# Result: Upload/Clone all the input files/folders, if a folder is empty, print Error message. 
-################################################### -_process_arguments() { - # on successful uploads - _share_and_print_link() { - "${SHARE:-:}" "${1:-}" "${SHARE_ROLE}" "${SHARE_EMAIL}" - [[ -z ${HIDE_INFO} ]] && { - _print_center "justify" "DriveLink" "${SHARE:+ (SHARED[${SHARE_ROLE:0:1}])}" "-" - _support_ansi_escapes && [[ ${COLUMNS} -gt 45 ]] && _print_center "normal" "↓ ↓ ↓" ' ' - "${QUIET:-_print_center}" "normal" "https://drive.google.com/open?id=${1:-}" " " - } - return 0 - } - - for input in "${FINAL_LOCAL_INPUT_ARRAY[@]}"; do - # Check if the argument is a file or a directory. - if [[ -f ${input} ]]; then - # export DESCRIPTION_FILE, used for descriptions in _upload_file function - export DESCRIPTION_FILE="${DESCRIPTION}" - - _print_center "justify" "Given Input" ": FILE" "=" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - _upload_file_main noparse "${input}" "${WORKSPACE_FOLDER_ID}" - if [[ ${RETURN_STATUS} = 1 ]]; then - _share_and_print_link "${FILE_ID}" - printf "\n" - else - for _ in 1 2; do _clear_line 1; done && continue - fi - elif [[ -d ${input} ]]; then - input="$(cd "${input}" && pwd)" || return 1 # to handle _dirname when current directory (.) is given as input. - unset EMPTY # Used when input folder is empty - - # export DESCRIPTION_FILE only if DESCRIPTION_ALL var is available, used for descriptions in _upload_file function - export DESCRIPTION_FILE="${DESCRIPTION_ALL:+${DESCRIPTION}}" - - _print_center "justify" "Given Input" ": FOLDER" "-" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - FOLDER_NAME="${input##*/}" && "${EXTRA_LOG}" "justify" "Folder: ${FOLDER_NAME}" "=" - - NEXTROOTDIRID="${WORKSPACE_FOLDER_ID}" - - "${EXTRA_LOG}" "justify" "Processing folder.." "-" - - [[ -z ${SKIP_SUBDIRS} ]] && "${EXTRA_LOG}" "justify" "Indexing subfolders.." "-" - # Do not create empty folders during a recursive upload. 
Use of find in this section is important. - mapfile -t DIRNAMES <<< "$(find "${input}" -type d -not -empty)" - NO_OF_FOLDERS="${#DIRNAMES[@]}" && NO_OF_SUB_FOLDERS="$((NO_OF_FOLDERS - 1))" - [[ -z ${SKIP_SUBDIRS} ]] && _clear_line 1 - [[ ${NO_OF_SUB_FOLDERS} = 0 ]] && SKIP_SUBDIRS="true" - - "${EXTRA_LOG}" "justify" "Indexing files.." "-" - mapfile -t FILENAMES <<< "$(_tmp='find "'${input}'" -type f -name "*" '${INCLUDE_FILES}' '${EXCLUDE_FILES}'' && eval "${_tmp}")" - _clear_line 1 - - # Skip the sub folders and find recursively all the files and upload them. - if [[ -n ${SKIP_SUBDIRS} ]]; then - if [[ -n ${FILENAMES[0]} ]]; then - for _ in 1 2; do _clear_line 1; done - NO_OF_FILES="${#FILENAMES[@]}" - - "${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n" - "${EXTRA_LOG}" "justify" "Creating folder.." "-" - { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } || - { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; } - _clear_line 1 && DIRIDS="${ID}" - - [[ -z ${PARALLEL_UPLOAD:-${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n" - _upload_folder "${PARALLEL_UPLOAD:-normal}" noparse "$(printf "%s\n" "${FILENAMES[@]}")" "${ID}" - [[ -n ${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n\n" - else - for _ in 1 2; do _clear_line 1; done && EMPTY=1 - fi - else - if [[ -n ${FILENAMES[0]} ]]; then - for _ in 1 2; do _clear_line 1; done - NO_OF_FILES="${#FILENAMES[@]}" - "${QUIET:-_print_center}" "justify" "${FOLDER_NAME} " "| ${NO_OF_FILES} File(s) | ${NO_OF_SUB_FOLDERS} Sub-folders" "=" - - _newline "\n" && "${EXTRA_LOG}" "justify" "Creating Folder(s).." 
"-" && _newline "\n" - unset status DIRIDS - for dir in "${DIRNAMES[@]}"; do - [[ -n ${status} ]] && __dir="$(_dirname "${dir}")" && - __temp="$(printf "%s\n" "${DIRIDS}" | grep -F "|:_//_:|${__dir}|:_//_:|")" && - NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}" - - NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2 - ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" || - { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; } - - # Store sub-folder directory IDs and it's path for later use. - DIRIDS+="${ID}|:_//_:|${dir}|:_//_:|"$'\n' - - for _ in 1 2; do _clear_line 1 1>&2; done - "${EXTRA_LOG}" "justify" "Status" ": $((status += 1)) / ${NO_OF_FOLDERS}" "=" 1>&2 - done && export DIRIDS - - _clear_line 1 - - _upload_folder "${PARALLEL_UPLOAD:-normal}" parse "$(printf "%s\n" "${FILENAMES[@]}")" - [[ -n ${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n\n" - else - for _ in 1 2 3; do _clear_line 1; done && EMPTY=1 - fi - fi - if [[ ${EMPTY} != 1 ]]; then - [[ -z ${VERBOSE:-${VERBOSE_PROGRESS}} ]] && for _ in 1 2; do _clear_line 1; done - - FOLDER_ID="$(: "${DIRIDS%%$'\n'*}" && printf "%s\n" "${_/"|:_//_:|"*/}")" - - [[ ${SUCCESS_STATUS} -gt 0 ]] && _share_and_print_link "${FOLDER_ID}" - - _newline "\n" - [[ ${SUCCESS_STATUS} -gt 0 ]] && "${QUIET:-_print_center}" "justify" "Total Files " "Uploaded: ${SUCCESS_STATUS}" "=" - [[ ${ERROR_STATUS} -gt 0 ]] && "${QUIET:-_print_center}" "justify" "Total Files " "Failed: ${ERROR_STATUS}" "=" && { - # If running inside a terminal, then check if failed files are more than 25, if not, then print, else save in a log file - if [[ -t 1 ]]; then - { [[ ${ERROR_STATUS} -le 25 ]] && printf "%s\n" "${ERROR_FILES}"; } || { - epoch_time="$(printf "%(%s)T\\n" "-1")" log_file_name="${0##*/}_${FOLDER_NAME}_${epoch_time}.failed" - # handle in case the vivid random file name was already there - i=0 && until ! 
[[ -f ${log_file_name} ]]; do - : $((i += 1)) && log_file_name="${0##*/}_${FOLDER_NAME}_$((epoch_time + i)).failed" - done - printf "%s\n%s\n%s\n\n%s\n%s\n" \ - "Folder name: ${FOLDER_NAME} | Folder ID: ${FOLDER_ID}" \ - "Run this command to retry the failed uploads:" \ - " ${0##*/} --skip-duplicates \"${input}\" --root-dir \"${NEXTROOTDIRID}\" ${SKIP_SUBDIRS:+-s} ${PARALLEL_UPLOAD:+--parallel} ${PARALLEL_UPLOAD:+${NO_OF_PARALLEL_JOBS}}" \ - "Failed files:" \ - "${ERROR_FILES}" >> "${log_file_name}" - printf "%s\n" "To see the failed files, open \"${log_file_name}\"" - printf "%s\n" "To retry the failed uploads only, use -d / --skip-duplicates flag. See log file for more help." - - } - # if not running inside a terminal, print it all - else - printf "%s\n" "${ERROR_FILES}" - fi - } - printf "\n" - else - for _ in 1 2 3; do _clear_line 1; done - "${QUIET:-_print_center}" 'justify' "Empty Folder" ": ${FOLDER_NAME}" "=" 1>&2 - printf "\n" - fi - fi - done - - unset Aseen && declare -A Aseen - for gdrive_id in "${FINAL_ID_INPUT_ARRAY[@]}"; do - { [[ ${Aseen[${gdrive_id}]} ]] && continue; } || Aseen[${gdrive_id}]=x - _print_center "justify" "Given Input" ": ID" "=" - "${EXTRA_LOG}" "justify" "Checking if id exists.." "-" - [[ ${CHECK_MODE} = "md5Checksum" ]] && declare param="md5Checksum" - json="$(_drive_info "${gdrive_id}" "name,mimeType,size${param:+,${param}}" || :)" - if ! 
_json_value code 1 1 <<< "${json}" 2>| /dev/null 1>&2; then - type="$(_json_value mimeType 1 1 <<< "${json}" || :)" - name="$(_json_value name 1 1 <<< "${json}" || :)" - size="$(_json_value size 1 1 <<< "${json}" || :)" - [[ ${CHECK_MODE} = "md5Checksum" ]] && md5="$(_json_value md5Checksum 1 1 <<< "${json}" || :)" - for _ in 1 2; do _clear_line 1; done - if [[ ${type} =~ folder ]]; then - # export DESCRIPTION_FILE only if DESCRIPTION_ALL var is available, used for descriptions in _clone_file function - export DESCRIPTION_FILE="${DESCRIPTION_ALL+:${DESCRIPTION}}" - - "${QUIET:-_print_center}" "justify" "Folder not supported." "=" 1>&2 && _newline "\n" 1>&2 && continue - ## TODO: Add support to clone folders - else - # export DESCRIPTION_FILE, used for descriptions in _clone_file function - export DESCRIPTION_FILE="${DESCRIPTION}" - - _print_center "justify" "Given Input" ": File ID" "=" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" "${md5}" || - { for _ in 1 2; do _clear_line 1; done && continue; } - fi - _share_and_print_link "${FILE_ID}" - printf "\n" - else - _clear_line 1 - "${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-gdrive_id})" " invalid." "=" 1>&2 - printf "\n" - fi - done - return 0 -} - -main() { - [[ $# = 0 ]] && _short_help - - [[ -z ${SELF_SOURCE} ]] && { - export UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" - { . "${UTILS_FOLDER}"/auth-utils.bash && . "${UTILS_FOLDER}"/common-utils.bash && . "${UTILS_FOLDER}"/drive-utils.bash && . 
"${UTILS_FOLDER}"/upload-utils.bash; } || - { printf "Error: Unable to source util files.\n" && exit 1; } - } - - _check_bash_version && set -o errexit -o noclobber -o pipefail - - _setup_arguments "${@}" || exit 1 - "${SKIP_INTERNET_CHECK:-_check_internet}" || exit 1 - - { { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || - TMPFILE="${PWD}/.$(_t="$(printf "%(%s)T\\n" "-1")" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 - export TMPFILE - - _cleanup() { - # unhide the cursor if hidden - [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\033[?25h\033[?7h" - { - [[ -f ${TMPFILE}_ACCESS_TOKEN ]] && { - # update the config with latest ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY only if changed - . "${TMPFILE}_ACCESS_TOKEN" - [[ ${INITIAL_ACCESS_TOKEN} = "${ACCESS_TOKEN}" ]] || { - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "${ACCESS_TOKEN}" "${CONFIG}" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}" - } - } || : 1>| /dev/null - - # grab all chidren processes of access token service - # https://askubuntu.com/a/512872 - [[ -n ${ACCESS_TOKEN_SERVICE_PID} ]] && { - token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)" - # first kill parent id, then children processes - kill "${ACCESS_TOKEN_SERVICE_PID}" - } || : 1>| /dev/null - - # grab all script children pids - script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)" - - # kill all grabbed children processes - # shellcheck disable=SC2086 - kill ${token_service_pids} ${script_children_pids} 1>| /dev/null - - rm -f "${TMPFILE:?}"* - - export abnormal_exit && if [[ -n ${abnormal_exit} ]]; then - printf "\n\n%s\n" "Script exited manually." 
- kill -- -$$ & - else - { _cleanup_config "${CONFIG}" && [[ ${GUPLOAD_INSTALLED_WITH} = script ]] && _auto_update; } 1>| /dev/null & - fi - } 2>| /dev/null || : - return 0 - } - - trap 'abnormal_exit="1"; exit' INT TERM - trap '_cleanup' EXIT - trap '' TSTP # ignore ctrl + z - - export MAIN_PID="$$" - - START="$(printf "%(%s)T\\n" "-1")" - - "${EXTRA_LOG}" "justify" "Checking credentials.." "-" - { _check_credentials && _clear_line 1; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Credentials checking failed ]" "=" && exit 1; } - "${QUIET:-_print_center}" "normal" " Account: ${ACCOUNT_NAME} " "=" - - "${EXTRA_LOG}" "justify" "Checking root dir.." "-" - { _setup_root_dir && _clear_line 1; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Rootdir setup failed ]" "=" && exit 1; } - _print_center "justify" "Root dir properly configured." "=" - - # only execute next blocks if there was some input - [[ -n ${CONTINUE_WITH_NO_INPUT} ]] && exit 0 - - "${EXTRA_LOG}" "justify" "Checking Workspace Folder.." 
"-" - { _setup_workspace && for _ in 1 2; do _clear_line 1; done; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Workspace setup failed ]" "=" && exit 1; } - _print_center "justify" "Workspace Folder: ${WORKSPACE_FOLDER_NAME}" "=" - _print_center "normal" " ${WORKSPACE_FOLDER_ID} " "-" && _newline "\n" - - # hide the cursor if ansi escapes are supported - [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\033[?25l" - - _process_arguments - - END="$(printf "%(%s)T\\n" "-1")" - DIFF="$((END - START))" - "${QUIET:-_print_center}" "normal" " Time Elapsed: ""$((DIFF / 60))"" minute(s) and ""$((DIFF % 60))"" seconds " "=" -} - -{ [[ -z ${SOURCED_GUPLOAD} ]] && main "${@}"; } || : diff --git a/bash/upload-utils.bash b/bash/upload-utils.bash deleted file mode 100755 index d1338ce..0000000 --- a/bash/upload-utils.bash +++ /dev/null @@ -1,176 +0,0 @@ -#!/usr/bin/env bash -# shellcheck source=/dev/null - -################################################### -# A simple wrapper to check tempfile for access token and make authorized oauth requests to drive api -################################################### -_api_request() { - . 
"${TMPFILE}_ACCESS_TOKEN" - - curl --compressed \ - -H "Authorization: Bearer ${ACCESS_TOKEN}" \ - "${@}" -} - -################################################### -# Used in collecting file properties from output json after a file has been uploaded/cloned -# Also handles logging in log file if LOG_FILE_ID is set -# Globals: 1 variables, 2 functions -# Variables - LOG_FILE_ID -# Functions - _error_logging_upload, _json_value -# Arguments: 1 -# ${1} = output jsom -# Result: set fileid and link, save info to log file if required -################################################### -_collect_file_info() { - declare json="${1}" info - FILE_ID="$(_json_value id 1 1 <<< "${json}")" || { _error_logging_upload "${2}" "${json}" || return 1; } - [[ -z ${LOG_FILE_ID} || -d ${LOG_FILE_ID} ]] && return 0 - info="Link: https://drive.google.com/open?id=${FILE_ID} -Name: $(_json_value name 1 1 <<< "${json}" || :) -ID: ${FILE_ID} -Type: $(_json_value mimeType 1 1 <<< "${json}" || :)" - printf "%s\n\n" "${info}" >> "${LOG_FILE_ID}" - return 0 -} - -################################################### -# Error logging wrapper -################################################### -_error_logging_upload() { - declare log="${2}" - "${QUIET:-_print_center}" "justify" "Upload ERROR" ", ${1:-} not ${STRING:-uploaded}." "=" 1>&2 - case "${log}" in - # https://github.com/rclone/rclone/issues/3857#issuecomment-573413789 - *'"message": "User rate limit exceeded."'*) - printf "%s\n\n%s\n" "${log}" \ - "Today's upload limit reached for this account. Use another account to upload or wait for tomorrow." 
1>&2 - # Never retry if upload limit reached - export RETRY=0 - ;; - '' | *) printf "%s\n" "${log}" 1>&2 ;; - esac - printf "\n\n\n" 1>&2 - return 1 -} - -################################################### -# A small function to get rootdir id for files in sub folder uploads -# Globals: 1 variable, 1 function -# Variables - DIRIDS -# Functions - _dirname -# Arguments: 1 -# ${1} = filename -# Result: read discription -################################################### -_get_rootdir_id() { - declare file="${1:?Error: give filename}" __rootdir __temp - __rootdir="$(_dirname "${file}")" - __temp="$(grep -F "|:_//_:|${__rootdir}|:_//_:|" <<< "${DIRIDS:?Error: DIRIDS Missing}" || :)" - printf "%s\n" "${__temp%%"|:_//_:|${__rootdir}|:_//_:|"}" - return 0 -} - -################################################### -# A extra wrapper for _upload_file function to properly handle retries -# also handle uploads in case uploading from folder -# Globals: 2 variables, 1 function -# Variables - RETRY, UPLOAD_MODE -# Functions - _upload_file -# Arguments: 3 -# ${1} = parse or norparse -# ${2} = file path -# ${3} = if ${1} != parse; gdrive folder id to upload; fi -# Result: set SUCCESS var on success -################################################### -_upload_file_main() { - [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare file="${2}" dirid _sleep - { [[ ${1} = parse ]] && dirid="$(_get_rootdir_id "${file}")"; } || dirid="${3}" - - retry="${RETRY:-0}" && unset RETURN_STATUS - until [[ ${retry} -le 0 ]] && [[ -n ${RETURN_STATUS} ]]; do - if [[ -n ${4} ]]; then - { _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break; } || RETURN_STATUS=2 - else - { _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" && RETURN_STATUS=1 && break; } || RETURN_STATUS=2 - fi - # decrease retry using -=, skip sleep if all retries done - [[ $((retry -= 1)) -lt 1 ]] && sleep "$((_sleep += 1))" - # on every 
retry, sleep the times of retry it is, e.g for 1st, sleep 1, for 2nd, sleep 2 - continue - done - [[ -n ${4} ]] && { - { [[ ${RETURN_STATUS} = 1 ]] && printf "%s\n" "${file}"; } || printf "%s\n" "${file}" 1>&2 - } - return 0 -} - -################################################### -# Upload all files in the given folder, parallelly or non-parallely and show progress -# Globals: 7 variables, 3 functions -# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET -# Functions - _clear_line, _newline, _print_center and _upload_file_main -# Arguments: 4 -# ${1} = parallel or normal -# ${2} = parse or norparse -# ${3} = filenames with full path -# ${4} = if ${2} != parse; then gdrive folder id to upload; fi -# Result: read discription, set SUCCESS_STATUS & ERROR_STATUS -################################################### -_upload_folder() { - [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - declare mode="${1}" files="${3}" && PARSE_MODE="${2}" ID="${4:-}" && export PARSE_MODE ID - SUCCESS_STATUS=0 SUCCESS_FILES="" ERROR_STATUS=0 ERROR_FILES="" - case "${mode}" in - normal) - [[ ${PARSE_MODE} = parse ]] && _clear_line 1 && _newline "\n" - - while read -u 4 -r file; do - _upload_file_main "${PARSE_MODE}" "${file}" "${ID}" - { [[ ${RETURN_STATUS} = 1 ]] && : "$((SUCCESS_STATUS += 1))" && SUCCESS_FILES+="${file}"$'\n'; } || - { : "$((ERROR_STATUS += 1))" && ERROR_FILES+="${file}"$'\n'; } - if [[ -n ${VERBOSE:-${VERBOSE_PROGRESS}} ]]; then - _print_center "justify" "Status: ${SUCCESS_STATUS} Uploaded" " | ${ERROR_STATUS} Failed" "=" && _newline "\n" - else - for _ in 1 2; do _clear_line 1; done - _print_center "justify" "Status: ${SUCCESS_STATUS} Uploaded" " | ${ERROR_STATUS} Failed" "=" - fi - done 4<<< "${files}" - ;; - parallel) - NO_OF_PARALLEL_JOBS_FINAL="$((NO_OF_PARALLEL_JOBS > NO_OF_FILES ? 
NO_OF_FILES : NO_OF_PARALLEL_JOBS))" - [[ -f "${TMPFILE}"SUCCESS ]] && rm "${TMPFILE}"SUCCESS - [[ -f "${TMPFILE}"ERROR ]] && rm "${TMPFILE}"ERROR - - # shellcheck disable=SC2016 - printf "%s\n" "${files}" | xargs -P"${NO_OF_PARALLEL_JOBS_FINAL}" -I "{}" -n 1 bash -c ' - _upload_file_main "${PARSE_MODE}" "{}" "${ID}" true - ' 1>| "${TMPFILE}"SUCCESS 2>| "${TMPFILE}"ERROR & - pid="${!}" - - until [[ -f "${TMPFILE}"SUCCESS ]] || [[ -f "${TMPFILE}"ERORR ]]; do sleep 0.5; done - [[ ${PARSE_MODE} = parse ]] && _clear_line 1 - _newline "\n" - - until ! kill -0 "${pid}" 2>| /dev/null 1>&2; do - SUCCESS_STATUS="$(_count < "${TMPFILE}"SUCCESS)" - ERROR_STATUS="$(_count < "${TMPFILE}"ERROR)" - sleep 1 - [[ $((SUCCESS_STATUS + ERROR_STATUS)) != "${TOTAL}" ]] && - _clear_line 1 && "${QUIET:-_print_center}" "justify" "Status" ": ${SUCCESS_STATUS} Uploaded | ${ERROR_STATUS} Failed" "=" - TOTAL="$((SUCCESS_STATUS + ERROR_STATUS))" - done - SUCCESS_STATUS="$(_count < "${TMPFILE}"SUCCESS)" SUCCESS_FILES="$(< "${TMPFILE}"SUCCESS)" - ERROR_STATUS="$(_count < "${TMPFILE}"ERROR)" ERROR_FILES="$(< "${TMPFILE}"ERROR)" - ;; - esac - return 0 -} - -export -f _api_request \ - _collect_file_info \ - _error_logging_upload \ - _get_rootdir_id \ - _upload_file_main \ - _upload_folder diff --git a/bash/upload.bash b/bash/upload.bash deleted file mode 100755 index 42a2184..0000000 --- a/bash/upload.bash +++ /dev/null @@ -1,730 +0,0 @@ -#!/usr/bin/env bash -# Upload a file to Google Drive -# shellcheck source=/dev/null - -_usage() { - printf "%b" " -The script can be used to upload file/directory to google drive.\n -Usage:\n ${0##*/} [options.. ] \n -Foldername argument is optional. 
If not provided, the file will be uploaded to preconfigured google drive.\n -File name argument is optional if create directory option is used.\n -Options:\n - -a | --account 'account name' - Use different account than the default one.\n - To change the default account name, use this format, -a/--account default=account_name\n - -la | --list-accounts - Print all configured accounts in the config files.\n - -ca | --create-account 'account name' - To create a new account with the given name if does not already exists.\n - -da | --delete-account 'account name' - To delete an account information from config file. \n - -c | -C | --create-dir - option to create directory. Will provide folder id. Can be used to provide input folder, see README.\n - -r | --root-dir or - google folder ID/URL to which the file/directory is going to upload. - If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url\n - -s | --skip-subdirs - Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads.\n - -p | --parallel - Upload multiple files in parallel, Max value = 10.\n - -f | --[file|folder] - Specify files and folders explicitly in one command, use multiple times for multiple folder/files. See README for more use of this command.\n - -cl | --clone - Upload a gdrive file without downloading, require accessible gdrive link or id as argument.\n - -o | --overwrite - Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders.\n - -d | --skip-duplicates - Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders.\n - -cm | --check-mode - Additional flag for --overwrite and --skip-duplicates flag. 
Can be used to change check mode in those flags, available args are 'size' and 'md5'.\n - -desc | --description | --description-all - Specify description for the given file. To use the respective metadata of a file, below is the format:\n - File name ( fullname ): %f | Size: %s | Mime Type: %m\n - Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m'\n - Note: For files inside folders, use --description-all flag.\n - -S | --share - Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link.\n - -SM | -sm | --share-mode 'share mode' - Specify the share mode for sharing file.\n - Share modes are: r / reader - Read only permission.\n - : w / writer - Read and write permission.\n - : c / commenter - Comment only permission.\n - Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is neccessary.\n - --speed 'speed' - Limit the download speed, supported formats: 1K, 1M and 1G.\n - -i | --save-info - Save uploaded files info to the given filename.\n - -z | --config - Override default config file with custom config file.\nIf you want to change default value, then use this format -z/--config default=default=your_config_file_path.\n - -q | --quiet - Supress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders.\n - -R | --retry 'num of retries' - Retry the file upload if it fails, postive integer as argument. Currently only for file uploads.\n - -in | --include 'pattern' - Only include the files with the given pattern to upload - Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n - -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading. 
- Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --exclude "*1*", will exclude all the files pattern '1' in the name.\n - --hide - This flag will prevent the script to print sensitive information like root folder id or drivelink.\n - -v | --verbose - Display detailed message (only for non-parallel uploads).\n - -V | --verbose-progress - Display detailed message and detailed upload progress(only for non-parallel uploads).\n - --skip-internet-check - Do not check for internet connection, recommended to use in sync jobs. - $([[ ${GUPLOAD_INSTALLED_WITH} = script ]] && printf '%s\n' '\n -u | --update - Update the installed script in your system.\n - -U | --uninstall - Uninstall script, remove related files.\n') - --info - Show detailed info, only if script is installed system wide.\n - -D | --debug - Display script command trace.\n - -h | --help - Display this message.\n" - exit 0 -} - -_short_help() { - printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit 0 -} - -################################################### -# Print info if installed -# Globals: 7 variable -# COMMAND_NAME REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA -# Arguments: None -# Result: read description -################################################### -_version_info() { - if command -v "${COMMAND_NAME}" 1> /dev/null && [[ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]]; then - for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do - printf "%s\n" "${i}=\"${!i}\"" - done | sed -e "s/=/: /g" - else - printf "%s\n" "google-drive-upload is not installed system wide." 
- fi - exit 0 -} - -################################################### -# Function to cleanup config file -# Remove invalid access tokens on the basis of corresponding expiry -# Globals: None -# Arguments: 1 -# ${1} = config file -# Result: read description -################################################### -_cleanup_config() { - declare config="${1:?Error: Missing config}" values_regex - - ! [ -f "${config}" ] && return 0 - - while read -r line && [[ -n ${line} ]]; do - expiry_value_name="${line%%=*}" - token_value_name="${expiry_value_name%%_EXPIRY}" - - : "${line##*=}" && : "${_%\"}" && expiry="${_#\"}" - [[ ${expiry} -le "$(printf "%(%s)T\\n" "-1")" ]] && - values_regex="${values_regex:+${values_regex}|}${expiry_value_name}=\".*\"|${token_value_name}=\".*\"" - - done <<< "$(grep -F ACCESS_TOKEN_EXPIRY "${config}" || :)" - - chmod u+w "${config}" && - printf "%s\n" "$(grep -Ev "^\$${values_regex:+|${values_regex}}" "${config}")" >| "${config}" && - chmod "a-w-r-x,u+r" "${config}" - return 0 -} - -################################################### -# Process all arguments given to the script -# Globals: 2 variable, 1 function -# Variable - HOME, CONFIG -# Functions - _short_help -# Arguments: Many -# ${@} = Flags with argument and file/folder input -# Result: On -# Success - Set all the variables -# Error - Print error message and exit -# Reference: -# Email Regex - https://gist.github.com/guessi/82a73ee7eb2b1216eb9db17bb8d65dd1 -################################################### -_setup_arguments() { - [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1 - # Internal variables - # De-initialize if any variables set already. 
- unset LIST_ACCOUNTS UPDATE_DEFAULT_ACCOUNT CUSTOM_ACCOUNT_NAME NEW_ACCOUNT_NAME DELETE_ACCOUNT_NAME ACCOUNT_ONLY_RUN - unset FOLDERNAME LOCAL_INPUT_ARRAY ID_INPUT_ARRAY CONTINUE_WITH_NO_INPUT - unset PARALLEL NO_OF_PARALLEL_JOBS SHARE SHARE_ROLE SHARE_EMAIL OVERWRITE SKIP_DUPLICATES CHECK_MODE DESCRIPTION SKIP_SUBDIRS ROOTDIR QUIET - unset VERBOSE VERBOSE_PROGRESS DEBUG LOG_FILE_ID CURL_SPEED RETRY - export CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s" - INFO_PATH="${HOME}/.google-drive-upload" CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath" - [[ -f ${CONFIG_INFO} ]] && . "${CONFIG_INFO}" - CONFIG="${CONFIG:-${HOME}/.googledrive.conf}" - - # Configuration variables # Remote gDrive variables - unset ROOT_FOLDER ROOT_FOLDER_NAME CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ACCESS_TOKEN - export API_URL="https://www.googleapis.com" - export API_VERSION="v3" \ - SCOPE="${API_URL}/auth/drive" \ - REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ - TOKEN_URL="https://accounts.google.com/o/oauth2/token" - - _check_config() { - [[ ${1} = default* ]] && export UPDATE_DEFAULT_CONFIG="_update_config" - { [[ -r ${2} ]] && CONFIG="${2}"; } || { - printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1 - } - return 0 - } - - _check_longoptions() { - [[ -z ${2} ]] && - printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && - exit 1 - return 0 - } - - while [[ $# -gt 0 ]]; do - case "${1}" in - -h | --help) _usage ;; - -D | --debug) DEBUG="true" && export DEBUG ;; - --info) _version_info ;; - -a | --account) - _check_longoptions "${1}" "${2}" - export CUSTOM_ACCOUNT_NAME="${2/default=/}" && shift - [[ ${2} = default* ]] && export UPDATE_DEFAULT_ACCOUNT="_update_config" - ;; - -la | --list-account) export LIST_ACCOUNTS="true" ;; - # this flag is preferred over --account - -ca | --create-account) - _check_longoptions "${1}" "${2}" - export NEW_ACCOUNT_NAME="${2}" && 
shift - ;; - -da | --delete-account) - _check_longoptions "${1}" "${2}" - export DELETE_ACCOUNT_NAME="${2}" && shift - ;; - -c | -C | --create-dir) - _check_longoptions "${1}" "${2}" - FOLDERNAME="${2}" && shift - ;; - -r | --root-dir) - _check_longoptions "${1}" "${2}" - ROOTDIR="${2/default=/}" - [[ ${2} = default* ]] && UPDATE_DEFAULT_ROOTDIR="_update_config" - shift - ;; - -z | --config) - _check_longoptions "${1}" "${2}" - _check_config "${2}" "${2/default=/}" - shift - ;; - -i | --save-info) - _check_longoptions "${1}" "${2}" - export LOG_FILE_ID="${2}" && shift - ;; - -s | --skip-subdirs) export SKIP_SUBDIRS="true" ;; - -p | --parallel) - _check_longoptions "${1}" "${2}" - NO_OF_PARALLEL_JOBS="${2}" - if [[ ${2} -gt 0 ]]; then - NO_OF_PARALLEL_JOBS="$((NO_OF_PARALLEL_JOBS > 10 ? 10 : NO_OF_PARALLEL_JOBS))" - else - printf "\nError: -p/--parallel value ranges between 1 to 10.\n" - exit 1 - fi - export PARALLEL_UPLOAD="parallel" && shift - ;; - -o | --overwrite) export OVERWRITE="Overwrite" UPLOAD_MODE="update" ;; - -d | --skip-duplicates) export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update" ;; - -cm | --check-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - size) export CHECK_MODE="2" && shift ;; - md5) export CHECK_MODE="3" && shift ;; - *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; - esac - ;; - -desc | --description | --description-all) - _check_longoptions "${1}" "${2}" - [[ ${1} = "--description-all" ]] && export DESCRIPTION_ALL="true" - export DESCRIPTION="${2}" && shift - ;; - -f | --file | --folder) - _check_longoptions "${1}" "${2}" - LOCAL_INPUT_ARRAY+=("${2}") && shift - ;; - -cl | --clone) - _check_longoptions "${1}" "${2}" - FINAL_ID_INPUT_ARRAY+=("$(_extract_id "${2}")") && shift - ;; - -S | --share) - SHARE="_share_id" - EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" - if [[ -n ${2} && ! 
${2} = -* && ${2} =~ ${EMAIL_REGEX} ]]; then - SHARE_EMAIL="${2}" && shift && export SHARE_EMAIL - fi - SHARE_ROLE="${SHARE_ROLE:-reader}" - ;; - -[Ss][Mm] | --share-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - r | read*) SHARE_ROLE="reader" ;; - w | write*) SHARE_ROLE="writer" ;; - c | comment*) SHARE_ROLE="commenter" ;; - *) - printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." && - exit 1 - ;; - esac - SHARE="_share_id" - shift - ;; - --speed) - _check_longoptions "${1}" "${2}" - regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$' - if [[ ${2} =~ ${regex} ]]; then - export CURL_SPEED="--limit-rate ${2}" && shift - else - printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2 - exit 1 - fi - ;; - -R | --retry) - _check_longoptions "${1}" "${2}" - if [[ ${2} -gt 0 ]]; then - export RETRY="${2}" && shift - else - printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n" - exit 1 - fi - ;; - -in | --include) - _check_longoptions "${1}" "${2}" - INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && shift - ;; - -ex | --exclude) - _check_longoptions "${1}" "${2}" - EXCLUDE_FILES="${EXCLUDE_FILES} ! 
-name '${2}' " && shift - ;; - --hide) HIDE_INFO=":" ;; - -q | --quiet) export QUIET="_print_center_quiet" ;; - -v | --verbose) export VERBOSE="true" ;; - -V | --verbose-progress) export VERBOSE_PROGRESS="true" ;; - --skip-internet-check) SKIP_INTERNET_CHECK=":" ;; - '') shorthelp ;; - *) # Check if user meant it to be a flag - if [[ ${1} = -* ]]; then - [[ ${GUPLOAD_INSTALLED_WITH} = script ]] && { - case "${1}" in - -u | --update) - _check_debug && _update && { exit 0 || exit 1; } - ;; - --uninstall) - _check_debug && _update uninstall && { exit 0 || exit 1; } - ;; - esac - } - printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 - else - if [[ ${1} =~ (drive.google.com|docs.google.com) ]]; then - FINAL_ID_INPUT_ARRAY+=("$(_extract_id "${1}")") - else - # If no "-" is detected in 1st arg, it adds to input - LOCAL_INPUT_ARRAY+=("${1}") - fi - fi - ;; - esac - shift - done - - _check_debug - - [[ -n ${VERBOSE_PROGRESS} ]] && unset VERBOSE && export CURL_PROGRESS="" - [[ -n ${QUIET} ]] && export CURL_PROGRESS="-s" - - # create info path folder, can be missing if gupload was not installed with install.sh - mkdir -p "${INFO_PATH}" - - unset Aseen && declare -A Aseen - for input in "${LOCAL_INPUT_ARRAY[@]}"; do - { [[ ${Aseen[${input}]} ]] && continue; } || Aseen[${input}]=x - { [[ -r ${input} ]] && FINAL_LOCAL_INPUT_ARRAY+=("${input}"); } || { - { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid Input - ${input} ]" "=" && printf "\n"; } 1>&2 - continue - } - done - - # handle account related flags here as we want to use the flags independenlty even with no normal valid inputs - # delete account, --delete-account flag - # TODO: add support for deleting multiple accounts - [[ -n ${DELETE_ACCOUNT_NAME} ]] && _delete_account "${DELETE_ACCOUNT_NAME}" - # list all configured accounts, --list-accounts flag - [[ -n ${LIST_ACCOUNTS} ]] && _all_accounts - - # If no input, then check if either -C option was 
used. - [[ -z ${FINAL_LOCAL_INPUT_ARRAY[*]:-${FINAL_ID_INPUT_ARRAY[*]:-${FOLDERNAME:-}}} ]] && { - # if any account related option was used then don't show short help - [[ -z ${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-${NEW_ACCOUNT_NAME}}} ]] && _short_help - # exit right away if --list-accounts or --delete-account flag was used - [[ -n ${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-}} ]] && exit 0 - # don't exit right away when new account is created but also let the rootdir stuff execute - [[ -n ${NEW_ACCOUNT_NAME} ]] && CONTINUE_WITH_NO_INPUT="true" - } - - # set CHECK_MODE if empty, below are check mode values - # 1 = check only name, 2 = check name and size, 3 = check name and md5sum - [[ -z ${CHECK_MODE} ]] && { - case "${SKIP_DUPLICATES:-${OVERWRITE}}" in - "Overwrite") export CHECK_MODE="1" ;; - "Skip Existing") export CHECK_MODE="2" ;; - esac - } - - return 0 -} - -################################################### -# Setup root directory where all file/folders will be uploaded/updated -# Globals: 5 variables, 6 functions -# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET -# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value, _set_value -# Arguments: None -# Result: read description -# If root id not found then print message and exit -# Update config with root id and root id name if specified -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#use-read-as-an-alternative-to-the-sleep-command -################################################### -_setup_root_dir() { - _check_root_id() { - declare json rootid - json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")" - if ! rootid="$(_json_value id 1 1 <<< "${json}")"; then - { [[ ${json} =~ "File not found" ]] && "${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." 
"=" 1>&2; } || { - printf "%s\n" "${json}" 1>&2 - } - return 1 - fi - ROOT_FOLDER="${rootid}" - "${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "${ROOT_FOLDER}" "${CONFIG}" || return 1 - return 0 - } - _check_root_id_name() { - ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name || :)" - "${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" "${ROOT_FOLDER_NAME}" "${CONFIG}" || return 1 - return 0 - } - - _set_value indirect ROOT_FOLDER "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" - _set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" - - if [[ -n ${ROOTDIR:-} ]]; then - ROOT_FOLDER="${ROOTDIR}" && { _check_root_id "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } && unset ROOT_FOLDER_NAME - elif [[ -z ${ROOT_FOLDER} ]]; then - { [[ -t 1 ]] && "${QUIET:-_print_center}" "normal" "Enter root folder ID or URL, press enter for default ( root )" " " && printf -- "-> " && - read -r ROOT_FOLDER && [[ -n ${ROOT_FOLDER} ]] && { _check_root_id _update_config || return 1; }; } || { - ROOT_FOLDER="root" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "${ROOT_FOLDER}" "${CONFIG}" || return 1 - } - elif [[ -z ${ROOT_FOLDER_NAME} ]]; then - _check_root_id_name _update_config || return 1 # update default root folder name if not available - fi - - # fetch root folder name if rootdir different than default - [[ -z ${ROOT_FOLDER_NAME} ]] && { _check_root_id_name "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } - - return 0 -} - -################################################### -# Setup Workspace folder -# Check if the given folder exists in google drive. -# If not then the folder is created in google drive under the configured root folder. 
-# Globals: 2 variables, 3 functions -# Variables - FOLDERNAME, ROOT_FOLDER -# Functions - _create_directory, _drive_info, _json_value -# Arguments: None -# Result: Read Description -################################################### -_setup_workspace() { - if [[ -z ${FOLDERNAME} ]]; then - WORKSPACE_FOLDER_ID="${ROOT_FOLDER}" - WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}" - else - WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" || - { printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; } - WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" || - { printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; } - fi - return 0 -} - -################################################### -# Process all the values in "${FINAL_LOCAL_INPUT_ARRAY[@]}" & "${FINAL_ID_INPUT_ARRAY[@]}" -# Globals: 22 variables, 17 functions -# Variables - FINAL_LOCAL_INPUT_ARRAY ( array ), ACCESS_TOKEN, VERBOSE, VERBOSE_PROGRESS -# WORKSPACE_FOLDER_ID, UPLOAD_MODE, SKIP_DUPLICATES, OVERWRITE, SHARE, -# UPLOAD_STATUS, COLUMNS, API_URL, API_VERSION, TOKEN_URL, LOG_FILE_ID -# FILE_ID, FILE_LINK, FINAL_ID_INPUT_ARRAY ( array ) -# PARALLEL_UPLOAD, QUIET, NO_OF_PARALLEL_JOBS, TMPFILE, SHARE_ROLE -# Functions - _print_center, _clear_line, _newline, _support_ansi_escapes, _print_center_quiet -# _upload_file, _share_id, _is_terminal, _dirname, -# _create_directory, _json_value, _url_encode, _check_existing_file, _bytes_to_human -# _clone_file, _get_access_token_and_update, _get_rootdir_id -# Arguments: None -# Result: Upload/Clone all the input files/folders, if a folder is empty, print Error message. 
-################################################### -_process_arguments() { - # on successful uploads - _share_and_print_link() { - "${SHARE:-:}" "${1:-}" "${SHARE_ROLE}" "${SHARE_EMAIL}" - [[ -z ${HIDE_INFO} ]] && { - _print_center "justify" "DriveLink" "${SHARE:+ (SHARED[${SHARE_ROLE:0:1}])}" "-" - _support_ansi_escapes && [[ ${COLUMNS} -gt 45 ]] && _print_center "normal" "↓ ↓ ↓" ' ' - "${QUIET:-_print_center}" "normal" "https://drive.google.com/open?id=${1:-}" " " - } - return 0 - } - - for input in "${FINAL_LOCAL_INPUT_ARRAY[@]}"; do - # Check if the argument is a file or a directory. - if [[ -f ${input} ]]; then - # export DESCRIPTION_FILE, used for descriptions in _upload_file function - export DESCRIPTION_FILE="${DESCRIPTION}" - - _print_center "justify" "Given Input" ": FILE" "=" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - _upload_file_main noparse "${input}" "${WORKSPACE_FOLDER_ID}" - if [[ ${RETURN_STATUS} = 1 ]]; then - _share_and_print_link "${FILE_ID}" - printf "\n" - else - for _ in 1 2; do _clear_line 1; done && continue - fi - elif [[ -d ${input} ]]; then - input="$(cd "${input}" && pwd)" || return 1 # to handle _dirname when current directory (.) is given as input. - unset EMPTY # Used when input folder is empty - - # export DESCRIPTION_FILE only if DESCRIPTION_ALL var is available, used for descriptions in _upload_file function - export DESCRIPTION_FILE="${DESCRIPTION_ALL:+${DESCRIPTION}}" - - _print_center "justify" "Given Input" ": FOLDER" "-" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - FOLDER_NAME="${input##*/}" && "${EXTRA_LOG}" "justify" "Folder: ${FOLDER_NAME}" "=" - - NEXTROOTDIRID="${WORKSPACE_FOLDER_ID}" - - "${EXTRA_LOG}" "justify" "Processing folder.." "-" - - [[ -z ${SKIP_SUBDIRS} ]] && "${EXTRA_LOG}" "justify" "Indexing subfolders.." "-" - # Do not create empty folders during a recursive upload. 
Use of find in this section is important. - mapfile -t DIRNAMES <<< "$(find "${input}" -type d -not -empty)" - NO_OF_FOLDERS="${#DIRNAMES[@]}" && NO_OF_SUB_FOLDERS="$((NO_OF_FOLDERS - 1))" - [[ -z ${SKIP_SUBDIRS} ]] && _clear_line 1 - [[ ${NO_OF_SUB_FOLDERS} = 0 ]] && SKIP_SUBDIRS="true" - - "${EXTRA_LOG}" "justify" "Indexing files.." "-" - mapfile -t FILENAMES <<< "$(_tmp='find "'${input}'" -type f -name "*" '${INCLUDE_FILES}' '${EXCLUDE_FILES}'' && eval "${_tmp}")" - _clear_line 1 - - # Skip the sub folders and find recursively all the files and upload them. - if [[ -n ${SKIP_SUBDIRS} ]]; then - if [[ -n ${FILENAMES[0]} ]]; then - for _ in 1 2; do _clear_line 1; done - NO_OF_FILES="${#FILENAMES[@]}" - - "${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n" - "${EXTRA_LOG}" "justify" "Creating folder.." "-" - { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } || - { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; } - _clear_line 1 && DIRIDS="${ID}" - - [[ -z ${PARALLEL_UPLOAD:-${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n" - _upload_folder "${PARALLEL_UPLOAD:-normal}" noparse "$(printf "%s\n" "${FILENAMES[@]}")" "${ID}" - [[ -n ${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n\n" - else - for _ in 1 2; do _clear_line 1; done && EMPTY=1 - fi - else - if [[ -n ${FILENAMES[0]} ]]; then - for _ in 1 2; do _clear_line 1; done - NO_OF_FILES="${#FILENAMES[@]}" - "${QUIET:-_print_center}" "justify" "${FOLDER_NAME} " "| ${NO_OF_FILES} File(s) | ${NO_OF_SUB_FOLDERS} Sub-folders" "=" - - _newline "\n" && "${EXTRA_LOG}" "justify" "Creating Folder(s).." 
"-" && _newline "\n" - unset status DIRIDS - for dir in "${DIRNAMES[@]}"; do - [[ -n ${status} ]] && __dir="$(_dirname "${dir}")" && - __temp="$(printf "%s\n" "${DIRIDS}" | grep -F "|:_//_:|${__dir}|:_//_:|")" && - NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}" - - NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2 - ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" || - { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; } - - # Store sub-folder directory IDs and it's path for later use. - DIRIDS+="${ID}|:_//_:|${dir}|:_//_:|"$'\n' - - for _ in 1 2; do _clear_line 1 1>&2; done - "${EXTRA_LOG}" "justify" "Status" ": $((status += 1)) / ${NO_OF_FOLDERS}" "=" 1>&2 - done && export DIRIDS - - _clear_line 1 - - _upload_folder "${PARALLEL_UPLOAD:-normal}" parse "$(printf "%s\n" "${FILENAMES[@]}")" - [[ -n ${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n\n" - else - for _ in 1 2 3; do _clear_line 1; done && EMPTY=1 - fi - fi - if [[ ${EMPTY} != 1 ]]; then - [[ -z ${VERBOSE:-${VERBOSE_PROGRESS}} ]] && for _ in 1 2; do _clear_line 1; done - - FOLDER_ID="$(: "${DIRIDS%%$'\n'*}" && printf "%s\n" "${_/"|:_//_:|"*/}")" - - [[ ${SUCCESS_STATUS} -gt 0 ]] && _share_and_print_link "${FOLDER_ID}" - - _newline "\n" - [[ ${SUCCESS_STATUS} -gt 0 ]] && "${QUIET:-_print_center}" "justify" "Total Files " "Uploaded: ${SUCCESS_STATUS}" "=" - [[ ${ERROR_STATUS} -gt 0 ]] && "${QUIET:-_print_center}" "justify" "Total Files " "Failed: ${ERROR_STATUS}" "=" && { - # If running inside a terminal, then check if failed files are more than 25, if not, then print, else save in a log file - if [[ -t 1 ]]; then - { [[ ${ERROR_STATUS} -le 25 ]] && printf "%s\n" "${ERROR_FILES}"; } || { - epoch_time="$(printf "%(%s)T\\n" "-1")" log_file_name="${0##*/}_${FOLDER_NAME}_${epoch_time}.failed" - # handle in case the vivid random file name was already there - i=0 && until ! 
[[ -f ${log_file_name} ]]; do - : $((i += 1)) && log_file_name="${0##*/}_${FOLDER_NAME}_$((epoch_time + i)).failed" - done - printf "%s\n%s\n%s\n\n%s\n%s\n" \ - "Folder name: ${FOLDER_NAME} | Folder ID: ${FOLDER_ID}" \ - "Run this command to retry the failed uploads:" \ - " ${0##*/} --skip-duplicates \"${input}\" --root-dir \"${NEXTROOTDIRID}\" ${SKIP_SUBDIRS:+-s} ${PARALLEL_UPLOAD:+--parallel} ${PARALLEL_UPLOAD:+${NO_OF_PARALLEL_JOBS}}" \ - "Failed files:" \ - "${ERROR_FILES}" >> "${log_file_name}" - printf "%s\n" "To see the failed files, open \"${log_file_name}\"" - printf "%s\n" "To retry the failed uploads only, use -d / --skip-duplicates flag. See log file for more help." - - } - # if not running inside a terminal, print it all - else - printf "%s\n" "${ERROR_FILES}" - fi - } - printf "\n" - else - for _ in 1 2 3; do _clear_line 1; done - "${QUIET:-_print_center}" 'justify' "Empty Folder" ": ${FOLDER_NAME}" "=" 1>&2 - printf "\n" - fi - fi - done - - unset Aseen && declare -A Aseen - for gdrive_id in "${FINAL_ID_INPUT_ARRAY[@]}"; do - { [[ ${Aseen[${gdrive_id}]} ]] && continue; } || Aseen[${gdrive_id}]=x - _print_center "justify" "Given Input" ": ID" "=" - "${EXTRA_LOG}" "justify" "Checking if id exists.." "-" - [[ ${CHECK_MODE} = "md5Checksum" ]] && declare param="md5Checksum" - json="$(_drive_info "${gdrive_id}" "name,mimeType,size${param:+,${param}}" || :)" - if ! 
_json_value code 1 1 <<< "${json}" 2>| /dev/null 1>&2; then - type="$(_json_value mimeType 1 1 <<< "${json}" || :)" - name="$(_json_value name 1 1 <<< "${json}" || :)" - size="$(_json_value size 1 1 <<< "${json}" || :)" - [[ ${CHECK_MODE} = "md5Checksum" ]] && md5="$(_json_value md5Checksum 1 1 <<< "${json}" || :)" - for _ in 1 2; do _clear_line 1; done - if [[ ${type} =~ folder ]]; then - # export DESCRIPTION_FILE only if DESCRIPTION_ALL var is available, used for descriptions in _clone_file function - export DESCRIPTION_FILE="${DESCRIPTION_ALL+:${DESCRIPTION}}" - - "${QUIET:-_print_center}" "justify" "Folder not supported." "=" 1>&2 && _newline "\n" 1>&2 && continue - ## TODO: Add support to clone folders - else - # export DESCRIPTION_FILE, used for descriptions in _clone_file function - export DESCRIPTION_FILE="${DESCRIPTION}" - - _print_center "justify" "Given Input" ": File ID" "=" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" "${md5}" || - { for _ in 1 2; do _clear_line 1; done && continue; } - fi - _share_and_print_link "${FILE_ID}" - printf "\n" - else - _clear_line 1 - "${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-gdrive_id})" " invalid." "=" 1>&2 - printf "\n" - fi - done - return 0 -} - -main() { - [[ $# = 0 ]] && _short_help - - [[ -z ${SELF_SOURCE} ]] && { - export UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" - { . "${UTILS_FOLDER}"/auth-utils.bash && . "${UTILS_FOLDER}"/common-utils.bash && . "${UTILS_FOLDER}"/drive-utils.bash && . 
"${UTILS_FOLDER}"/upload-utils.bash; } || - { printf "Error: Unable to source util files.\n" && exit 1; } - } - - _check_bash_version && set -o errexit -o noclobber -o pipefail - - _setup_arguments "${@}" || exit 1 - "${SKIP_INTERNET_CHECK:-_check_internet}" || exit 1 - - { { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || - TMPFILE="${PWD}/.$(_t="$(printf "%(%s)T\\n" "-1")" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 - export TMPFILE - - _cleanup() { - # unhide the cursor if hidden - [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\033[?25h\033[?7h" - { - [[ -f ${TMPFILE}_ACCESS_TOKEN ]] && { - # update the config with latest ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY only if changed - . "${TMPFILE}_ACCESS_TOKEN" - [[ ${INITIAL_ACCESS_TOKEN} = "${ACCESS_TOKEN}" ]] || { - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "${ACCESS_TOKEN}" "${CONFIG}" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}" - } - } || : 1>| /dev/null - - # grab all chidren processes of access token service - # https://askubuntu.com/a/512872 - [[ -n ${ACCESS_TOKEN_SERVICE_PID} ]] && { - token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)" - # first kill parent id, then children processes - kill "${ACCESS_TOKEN_SERVICE_PID}" - } || : 1>| /dev/null - - # grab all script children pids - script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)" - - # kill all grabbed children processes - # shellcheck disable=SC2086 - kill ${token_service_pids} ${script_children_pids} 1>| /dev/null - - rm -f "${TMPFILE:?}"* - - export abnormal_exit && if [[ -n ${abnormal_exit} ]]; then - printf "\n\n%s\n" "Script exited manually." 
- kill -- -$$ & - else - { _cleanup_config "${CONFIG}" && [[ ${GUPLOAD_INSTALLED_WITH} = script ]] && _auto_update; } 1>| /dev/null & - fi - } 2>| /dev/null || : - return 0 - } - - trap 'abnormal_exit="1"; exit' INT TERM - trap '_cleanup' EXIT - trap '' TSTP # ignore ctrl + z - - export MAIN_PID="$$" - - START="$(printf "%(%s)T\\n" "-1")" - - "${EXTRA_LOG}" "justify" "Checking credentials.." "-" - { _check_credentials && _clear_line 1; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Credentials checking failed ]" "=" && exit 1; } - "${QUIET:-_print_center}" "normal" " Account: ${ACCOUNT_NAME} " "=" - - "${EXTRA_LOG}" "justify" "Checking root dir.." "-" - { _setup_root_dir && _clear_line 1; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Rootdir setup failed ]" "=" && exit 1; } - _print_center "justify" "Root dir properly configured." "=" - - # only execute next blocks if there was some input - [[ -n ${CONTINUE_WITH_NO_INPUT} ]] && exit 0 - - "${EXTRA_LOG}" "justify" "Checking Workspace Folder.." "-" - { _setup_workspace && for _ in 1 2; do _clear_line 1; done; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Workspace setup failed ]" "=" && exit 1; } - _print_center "justify" "Workspace Folder: ${WORKSPACE_FOLDER_NAME}" "=" - _print_center "normal" " ${WORKSPACE_FOLDER_ID} " "-" && _newline "\n" - - # hide the cursor if ansi escapes are supported - [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\033[?25l" - - _process_arguments - - END="$(printf "%(%s)T\\n" "-1")" - DIFF="$((END - START))" - "${QUIET:-_print_center}" "normal" " Time Elapsed: ""$((DIFF / 60))"" minute(s) and ""$((DIFF % 60))"" seconds " "=" -} - -{ [[ -z ${SOURCED_GUPLOAD} ]] && main "${@}"; } || : diff --git a/format.sh b/format.sh deleted file mode 100755 index f86d8d0..0000000 --- a/format.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env sh - -if ! 
command -v shfmt 2>| /dev/null 1>&2; then - printf 'Install shfmt to format script\n\n' - printf 'Check https://github.com/mvdan/sh/releases\n' - exit 1 -fi - -CURRENT_DIR="$(pwd)" -TEMPFILE="${CURRENT_DIR}/$(date +%s)" - -trap 'rm -f "${TEMPFILE}".failedlog "${TEMPFILE}".passedlog' INT TERM EXIT - -for k in . sh bash; do - cd "${k}" 2>| /dev/null 1>&2 || exit 1 - for i in *.*sh; do - if ! shfmt -w "${i}"; then - printf "%s\n\n" "${k}/${i}: ERROR" >> "${TEMPFILE}".failedlog - else - printf "%s\n" "${k}/${i}: SUCCESS" >> "${TEMPFILE}".passedlog - fi - done - cd - 2>| /dev/null 1>&2 || exit 1 -done - -if [ -f "${TEMPFILE}.failedlog" ]; then - printf '\nError: Cannot format some files.\n\n' - grep '.*' "${TEMPFILE}".failedlog && printf "\n" - grep '.*' "${TEMPFILE}".passedlog - exit 1 -else - printf 'All files formatted successfully.\n\n' - grep '.*' "${TEMPFILE}".passedlog - exit 0 -fi diff --git a/format_and_lint.sh b/format_and_lint.sh new file mode 100644 index 0000000..c42857f --- /dev/null +++ b/format_and_lint.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env sh + +# shfmt - beautify scripts +if command -v shfmt 1>| /dev/null; then + printf "Beautifying scripts with shfmt... \n" + for k in . src/common src/sh src/bash; do + cd "${k}" 2>| /dev/null 1>&2 || exit 1 + for i in *.*sh; do + if ! shfmt -w "${i}"; then + printf "%s\n\n" "${k}/${i}: ERROR" + format_status=1 + else + printf "%s\n" "${k}/${i}: SUCCESS" + fi + done + cd - 2>| /dev/null 1>&2 || exit 1 + done + format_status="${format_status:-0}" + printf "\n" +else + printf 'Install shfmt to format script\n\n' + printf 'Check https://github.com/mvdan/sh/releases\n\n' +fi + +# shell check - lint script +if command -v shellcheck 1>| /dev/null; then + printf "Linting scripts with shellcheck... \n" + for k in . src/common src/sh src/bash; do + cd "${k}" 2>| /dev/null 1>&2 || exit 1 + for i in *.*sh; do + if ! 
shellcheck -o all -e SC2312 "${i}"; then + printf "\n%s\n\n" "${k}/${i}: ERROR" + lint_status=1 + else + printf "%s\n" "${k}/${i}: SUCCESS" + fi + done + cd - 2>| /dev/null 1>&2 || exit 1 + done + lint_status="${lint_status:-0}" + printf "\n" +else + printf 'Install shellcheck to lint script.\n' + printf 'Check https://www.shellcheck.net/ or https://github.com/koalaman/shellcheck\n\n' +fi + +[ "${format_status}" = 1 ] && + printf "Error: Some files not formatted succesfully.\n" + +[ "${lint_status}" = 1 ] && + printf "Error: Some shellcheck warnings need to be fixed.\n" + +if [ "${lint_status}" = 1 ] || [ "${format_status}" = 1 ]; then + exit 1 +else + exit 0 +fi diff --git a/install.sh b/install.sh old mode 100755 new mode 100644 index 58b7692..63a38b0 --- a/install.sh +++ b/install.sh @@ -3,22 +3,22 @@ # shellcheck source=/dev/null _usage() { - printf "%s\n" " + printf "%s" " The script can be used to install google-drive-upload script in your system.\n Usage: ${0##*/} [options.. ]\n All flags are optional.\n Options:\n - -p | --path - Custom path where you want to install script.\nDefault Path: ${HOME}/.google-drive-upload \n + -p | --path - Custom path where you want to install script.\nDefault Path: ${HOME}/.google-drive-upload. \n -c | --cmd - Custom command name, after installation script will be available as the input argument. 
To change sync command name, use %s -c gupload sync='gsync' Default upload command: gupload Default sync command: gsync\n -r | --repo - Upload script from your custom repo,e.g --repo labbots/google-drive-upload, make sure your repo file structure is same as official repo.\n -R | --release - Specify tag name for the github repo, applies to custom and default repo both.\n - -B | --branch - Specify branch name for the github repo, applies to custom and default repo both.\n - -s | --shell-rc - Specify custom rc file, where PATH is appended, by default script detects .zshrc and .bashrc.\n + -b | -B | --branch - Specify branch name for the github repo, applies to custom and default repo both.\n + -s | --shell-rc - Specify custom rc file, where PATH is supposed to be appended, by default script detects .zshrc and .bashrc.\n -t | --time 'no of days' - Specify custom auto update time ( given input will taken as number of days ) after which script will try to automatically update itself.\n - Default: 5 ( 5 days )\n + Default: 5 days\n --skip-internet-check - Like the flag says.\n --sh | --posix - Force install posix scripts even if system has compatible bash binary present.\n -q | --quiet - Only show critical error/sucess logs.\n @@ -35,8 +35,6 @@ _short_help() { ################################################### # Check if debug is enabled and enable command trace -# Globals: 2 variables -# Varibles - DEBUG, QUIET # Arguments: None # Result: If DEBUG # Present - Enable command trace and change print functions to avoid spamming. @@ -52,12 +50,11 @@ _check_debug() { else if [ -z "${QUIET}" ]; then # check if running in terminal and support ansi escape sequences - case "${TERM}" in - xterm* | rxvt* | urxvt* | linux* | vt* | screen* | st*) ansi_escapes="true" ;; - esac - if [ -t 2 ] && [ -n "${ansi_escapes}" ]; then - ! COLUMNS="$(_get_columns_size)" || [ "${COLUMNS:-0}" -lt 45 ] 2>| /dev/null && + if _support_ansi_escapes; then + if ! 
_required_column_size; then _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } + + fi else _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } _clear_line() { :; } @@ -115,8 +112,6 @@ _check_dependencies() { ################################################### # Check internet connection. # Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs. -# Globals: 2 functions -# _print_center, _clear_line # Arguments: None # Result: On # Success - Nothing @@ -134,7 +129,6 @@ _check_internet() { ################################################### # Move cursor to nth no. of line and clear it to the begining. -# Globals: None # Arguments: 1 # ${1} = Positive integer ( line number ) # Result: Read description @@ -146,8 +140,6 @@ _clear_line() { ################################################### # Detect profile rc file for zsh and bash. # Detects for login shell of the user. 
-# Globals: 2 Variables -# HOME, SHELL # Arguments: None # Result: On # Success - print profile file @@ -164,73 +156,9 @@ _detect_profile() { printf "%s\n" "${DETECTED_PROFILE}" } -################################################### -# print column size -# use bash or zsh or stty or tput -################################################### -_get_columns_size() { - { command -v bash 1>| /dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } || - { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } || - { command -v stty 1>| /dev/null && _tmp="$(stty size)" && printf "%s\n" "${_tmp##* }"; } || - { command -v tput 1>| /dev/null && tput cols; } || - return 1 -} - -################################################### -# Fetch latest commit sha of release or branch -# Do not use github rest api because rate limit error occurs -# Globals: None -# Arguments: 3 -# ${1} = repo name -# ${2} = sha sum or branch name or tag name -# ${3} = path ( optional ) -# Result: print fetched shas -################################################### -_get_files_and_commits() { - repo_get_files_and_commits="${1:-${REPO}}" type_value_get_files_and_commits="${2:-${LATEST_CURRENT_SHA}}" path_get_files_and_commits="${3:-}" - unset html_get_files_and_commits commits_get_files_and_commits files_get_files_and_commits - - # shellcheck disable=SC2086 - html_get_files_and_commits="$(curl -s --compressed "https://github.com/${repo_get_files_and_commits}/file-list/${type_value_get_files_and_commits}/${path_get_files_and_commits}")" || - { _print_center "normal" "Error: Cannot fetch" " update details" "=" 1>&2 && exit 1; } - # just grep the commit/ strings from html, then remove extra info with sed - commits_get_files_and_commits="$(printf "%s\n" "${html_get_files_and_commits}" | grep -o "commit/.*\"" | sed -e 's/commit\///g' -e 's/\"//g' -e 's/>.*//g')" - # only grep blob because we just want files - # shellcheck disable=SC2001 - 
files_get_files_and_commits="$(printf "%s\n" "${html_get_files_and_commits}" | grep -oE 'blob/'"${type_value_get_files_and_commits}"'.*\"' | sed -e 's/\"//g' -e 's/>.*//g')" - - total_files="$(($(printf "%s\n" "${files_get_files_and_commits}" | wc -l)))" - total_commits="$(($(printf "%s\n" "${commits_get_files_and_commits}" | wc -l)))" - - # exit right out in case wasn't able to grab commits or files - if [ "${total_commits}" -eq "0" ] || [ "${total_files}" -eq "0" ]; then - _print_center "normal" "Error: Cannot fetch" " update details" "=" 1>&2 && exit 1 - fi - - # this is gonna trigger in case of non-release commit sha - if [ "${total_commits}" -gt "${total_files}" ]; then - # delete alternate lines ( sed '{N;P;d}' ), because duplicate for every commit - commits_get_files_and_commits="$(printf "%s\n" "${commits_get_files_and_commits}" | sed -e 'N;P;d')" - fi - - exec 4<< EOF -$(printf "%s\n" "${files_get_files_and_commits}") -EOF - exec 5<< EOF -$(printf "%s\n" "${commits_get_files_and_commits}") -EOF - while read -r file <&4 && read -r commit <&5; do - printf "%s\n" "${file##blob\/${type_value_get_files_and_commits}\/}__.__${commit}" - done - exec 4<&- && exec 5<&- - - return 0 -} - ################################################### # Fetch latest commit sha of release or branch # Do not use github rest api because rate limit error occurs -# Globals: None # Arguments: 3 # ${1} = "branch" or "release" # ${2} = branch name or release name @@ -252,6 +180,7 @@ _get_latest_sha() { _tmp="$(printf "%s\n" "${raw_get_latest_sha}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && _tmp="${_tmp##*commit\/}" && printf "%s\n" "${_tmp%%\"*}" )" ;; + *) : ;; esac printf "%b" "${latest_sha_get_latest_sha:+${latest_sha_get_latest_sha}\n}" } @@ -259,8 +188,6 @@ _get_latest_sha() { ################################################### # Print a text to center interactively and fill the rest of the line with text specified. 
# This function is fine-tuned to this script functionality, so may appear unusual. -# Globals: 1 variable -# COLUMNS # Arguments: 4 # If ${1} = normal # ${2} = text to print @@ -278,7 +205,7 @@ _get_latest_sha() { ################################################### _print_center() { [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - term_cols_print_center="${COLUMNS}" + term_cols_print_center="${COLUMNS:-}" type_print_center="${1}" filler_print_center="" case "${type_print_center}" in normal) out_print_center="${2}" && symbol_print_center="${3}" ;; @@ -320,9 +247,34 @@ _print_center() { return 0 } +################################################### +# fetch column size and check if greater than the num ( see in function) +# return 1 or 0 +################################################### +_required_column_size() { + COLUMNS="$({ command -v bash 1>| /dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } || + { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } || + { command -v stty 1>| /dev/null && _tmp="$(stty size)" && printf "%s\n" "${_tmp##* }"; } || + { command -v tput 1>| /dev/null && tput cols; })" || : + + [ "$((COLUMNS))" -gt 45 ] && return 0 +} + +################################################### +# Check if script terminal supports ansi escapes +# Result: return 1 or 0 +################################################### +_support_ansi_escapes() { + unset ansi_escapes + case "${TERM}" in + xterm* | rxvt* | urxvt* | linux* | vt* | screen*) ansi_escapes="true" ;; + *) : ;; + esac + { [ -t 2 ] && [ -n "${ansi_escapes}" ] && return 0; } || return 1 +} + ################################################### # Alternative to timeout command -# Globals: None # Arguments: 1 and rest # ${1} = amount of time to sleep # rest = command to execute @@ -346,9 +298,6 @@ _timeout() { ################################################### # Initialize default variables -# Globals: 1 variable, 1 function -# 
Variable - HOME -# Function - _detect_profile # Arguments: None # Result: read description ################################################### @@ -358,11 +307,11 @@ _variables() { SYNC_COMMAND_NAME="gsync" INFO_PATH="${HOME}/.google-drive-upload" INSTALL_PATH="${HOME}/.google-drive-upload/bin" - CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath" - CONFIG="${HOME}/.googledrive.conf" + INSTALL_RC_STRING="[ -f \"\${HOME}/.google-drive-upload/bin/${COMMAND_NAME}\" ] && [ -x \"\${HOME}/.google-drive-upload/bin\" ] && PATH=\"\${HOME}/.google-drive-upload/bin:\${PATH}\"" TYPE="release" TYPE_VALUE="latest" SHELL_RC="$(_detect_profile)" + # If bash installation, then use bash printf else date LAST_UPDATE_TIME="$(if [ "${INSTALLATION}" = bash ]; then bash -c 'printf "%(%s)T\\n" "-1"' else @@ -372,9 +321,9 @@ _variables() { export GUPLOAD_INSTALLED_WITH="script" [ -n "${SKIP_SYNC}" ] && SYNC_COMMAND_NAME="" - export VALUES_LIST="REPO COMMAND_NAME ${SYNC_COMMAND_NAME:+SYNC_COMMAND_NAME} INSTALL_PATH TYPE TYPE_VALUE SHELL_RC LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL INSTALLATION GUPLOAD_SCRIPT_SHA GSYNC_SCRIPT_SHA GLOBAL_INSTALL PERM_MODE GUPLOAD_INSTALLED_WITH" + export VALUES_LIST="REPO COMMAND_NAME ${SYNC_COMMAND_NAME:+SYNC_COMMAND_NAME} INSTALL_PATH TYPE TYPE_VALUE LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL INSTALLATION GLOBAL_INSTALL PERM_MODE GUPLOAD_INSTALLED_WITH" - VALUES_REGEX="" && for i in VALUES_LIST REPO COMMAND_NAME ${SYNC_COMMAND_NAME:+SYNC_COMMAND_NAME} INSTALL_PATH TYPE TYPE_VALUE SHELL_RC LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL INSTALLATION GUPLOAD_SCRIPT_SHA GSYNC_SCRIPT_SHA GLOBAL_INSTALL PERM_MODE GUPLOAD_INSTALLED_WITH; do + VALUES_REGEX="" && for i in VALUES_LIST ${VALUES_LIST}; do VALUES_REGEX="${VALUES_REGEX:+${VALUES_REGEX}|}^${i}=\".*\".* # added values" done @@ -382,109 +331,36 @@ _variables() { } ################################################### -# For self and automatic updates -################################################### 
-_print_self_update_code() { - cat << 'EOF' -################################################### -# Automatic updater, only update if script is installed system wide. -# Globals: 5 variables, 2 functions -# COMMAND_NAME, REPO, INSTALL_PATH, TYPE, TYPE_VALUE | _update, _update_value -# Arguments: None -# Result: On -# Update if AUTO_UPDATE_INTERVAL + LAST_UPDATE_TIME less than printf "%(%s)T\\n" "-1" -################################################### -_auto_update() { - export REPO - command -v "${COMMAND_NAME}" 1> /dev/null && - if [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then - current_time="$(date +'%s')" - [ "$((LAST_UPDATE_TIME + AUTO_UPDATE_INTERVAL))" -lt "$(date +'%s')" ] && _update - _update_value LAST_UPDATE_TIME "${current_time}" - fi - return 0 -} - +# Download scripts ################################################### -# Install/Update/uninstall the script. -# Globals: 4 variables -# Varibles - HOME, REPO, TYPE_VALUE, GLOBAL_INSTALL -# Arguments: 1 -# ${1} = uninstall or update -# Result: On -# ${1} = nothing - Update the script if installed, otherwise install. -# ${1} = uninstall - uninstall the script -################################################### -_update() { - job_update="${1:-update}" - [ "${GLOBAL_INSTALL}" = true ] && ! [ "$(id -u)" = 0 ] && printf "%s\n" "Error: Need root access to update." && return 0 - [ "${job_update}" = uninstall ] && job_uninstall="--uninstall" - _print_center "justify" "Fetching ${job_update} script.." 
"-" - repo_update="${REPO:-labbots/google-drive-upload}" type_value_update="${TYPE_VALUE:-latest}" cmd_update="${COMMAND_NAME:-gupload}" path_update="${INSTALL_PATH:-${HOME}/.google-drive-upload/bin}" - { [ "${TYPE:-}" != branch ] && type_value_update="$(_get_latest_sha release "${type_value_update}" "${repo_update}")"; } || : - if script_update="$(curl --compressed -Ls "https://github.com/${repo_update}/raw/${type_value_update}/install.sh")"; then - _clear_line 1 - printf "%s\n" "${script_update}" | sh -s -- ${job_uninstall:-} --skip-internet-check --cmd "${cmd_update}" --path "${path_update}" - current_time="$(date +'%s')" - [ -z "${job_uninstall}" ] && _update_value LAST_UPDATE_TIME "${current_time}" +_download_file() { + cd "${INSTALL_PATH}" 2>| /dev/null 1>&2 || exit 1 + # make the file writable if present + [ -f "${INSTALL_PATH}/${COMMAND_NAME}" ] && chmod u+w -- "${INSTALL_PATH}/${COMMAND_NAME}" + _print_center "justify" "${COMMAND_NAME}" "-" + # now download the binary + if script_download_file="$(curl -Ls --compressed "https://github.com/${REPO}/raw/${LATEST_CURRENT_SHA}/release/${INSTALLATION:-}/gupload")"; then + # check if the downloaded script has any syntax errors, return 2 will be used later + printf "%s\n" "${script_download_file}" | "${INSTALLATION}" -n || return 2 + printf "%s\n" "${script_download_file}" >| "${COMMAND_NAME}" || return 1 else - _clear_line 1 - "${QUIET:-_print_center}" "justify" "Error: Cannot download" " ${job_update} script." 
"=" 1>&2 return 1 fi - return 0 -} - -################################################### -# Update in-script values -################################################### -_update_value() { - command_path="${INSTALL_PATH:?}/${COMMAND_NAME}" - value_name="${1:-}" value="${2:-}" - script_without_value_and_shebang="$(grep -v "${value_name}=\".*\".* # added values" "${command_path}" | sed 1d)" - new_script="$( - sed -n 1p "${command_path}" - printf "%s\n" "${value_name}=\"${value}\" # added values" - printf "%s\n" "${script_without_value_and_shebang}" - )" - chmod u+w "${command_path}" && printf "%s\n" "${new_script}" >| "${command_path}" && chmod "a-w-r-x,${PERM_MODE:-u}+r+x" "${command_path}" - return 0 -} -EOF -} - -################################################### -# Download scripts -################################################### -_download_files() { - releases="$(_get_files_and_commits "${REPO}" "${LATEST_CURRENT_SHA}" "${INSTALLATION}/release")" - - cd "${INSTALL_PATH}" 2>| /dev/null 1>&2 || exit 1 - - while read -r line <&4; do - file="${line%%__.__*}" && sha="${line##*__.__}" - - case "${file}" in - *gupload) - local_file="${COMMAND_NAME}" - [ "${GUPLOAD_SCRIPT_SHA}" = "${sha}" ] && continue - GUPLOAD_SCRIPT_SHA="${sha}" - ;; - *gsync) - local_file="${SYNC_COMMAND_NAME}" && [ -n "${SKIP_SYNC}" ] && continue - [ "${GSYNC_SCRIPT_SHA}" = "${sha}" ] && continue - GSYNC_SCRIPT_SHA="${sha}" - ;; - esac - - _print_center "justify" "${local_file}" "-" && [ -f "${local_file}" ] && chmod u+w "${local_file}" - # shellcheck disable=SC2086 - ! 
curl -s --compressed "https://raw.githubusercontent.com/${REPO}/${sha}/${file}" -o "${local_file}" && return 1 - _clear_line 1 - done 4<< EOF -$(printf "%s\n" "${releases}") -EOF + _clear_line 1 + [ -n "${SKIP_SYNC}" ] && return 0 + # make the file writable if present + [ -f "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" ] && chmod u+w -- "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" + _print_center "justify" "${SYNC_COMMAND_NAME}" "-" + # now download the binary + if script_download_file="$(curl -Ls --compressed "https://github.com/${REPO}/raw/${LATEST_CURRENT_SHA}/release/${INSTALLATION:-}/gsync")"; then + # check if the downloaded script has any syntax errors, return 2 will be used later + printf "%s\n" "${script_download_file}" | "${INSTALLATION}" -n || return 2 + printf "%s\n" "${script_download_file}" >| "${SYNC_COMMAND_NAME}" || return 1 + else + return 1 + fi + _clear_line 1 cd - 2>| /dev/null 1>&2 || exit 1 return 0 } @@ -494,39 +370,34 @@ EOF ################################################### _inject_values() { shebang="$(sed -n 1p "${INSTALL_PATH}/${COMMAND_NAME}")" - script_without_values_and_shebang="$(grep -vE "${VALUES_REGEX}|^LATEST_INSTALLED_SHA=\".*\".* # added values" "${INSTALL_PATH}/${COMMAND_NAME}" | sed 1d)" - chmod u+w "${INSTALL_PATH}/${COMMAND_NAME}" + script_without_values_and_shebang="$(grep -vE "${VALUES_REGEX}" -- "${INSTALL_PATH}/${COMMAND_NAME}" | sed 1d)" { printf "%s\n" "${shebang}" - for i in VALUES_LIST REPO COMMAND_NAME ${SYNC_COMMAND_NAME:+SYNC_COMMAND_NAME} INSTALL_PATH TYPE TYPE_VALUE SHELL_RC LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL INSTALLATION GUPLOAD_SCRIPT_SHA GSYNC_SCRIPT_SHA GLOBAL_INSTALL PERM_MODE GUPLOAD_INSTALLED_WITH; do + for i in VALUES_LIST ${VALUES_LIST}; do printf "%s\n" "${i}=\"$(eval printf "%s" \"\$"${i}"\")\" # added values" done printf "%s\n" "LATEST_INSTALLED_SHA=\"${LATEST_CURRENT_SHA}\" # added values" - _print_self_update_code # inject the self and auto update functions printf "%s\n" 
"${script_without_values_and_shebang}" } 1>| "${INSTALL_PATH}/${COMMAND_NAME}" [ -n "${SKIP_SYNC}" ] && return 0 - sync_script="$(sed "s|gupload|${COMMAND_NAME}|g" "${INSTALL_PATH}/${SYNC_COMMAND_NAME}")" - chmod u+w "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" - printf "%s\n" "${sync_script}" >| "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" + script_without_values_and_shebang="$(grep -vE "${VALUES_REGEX}" -- "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" | sed 1d)" + printf "%s\n" "${script_without_values_and_shebang}" | sed "s|gupload|${COMMAND_NAME}|g" 1>| "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" } ################################################### -# Install/Update the upload and sync script -# Globals: 11 variables, 5 functions -# Variables - INSTALL_PATH, INFO_PATH, UTILS_FILE, COMMAND_NAME, SYNC_COMMAND_NAME, SHELL_RC, -# TYPE, TYPE_VALUE, REPO, VALUES_LIST ( array ), IN_PATH, GLOBAL_PERMS -# Functions - _print_center, _newline, _clear_line -# _get_latest_sha, _inject_values +# Install/Update the download script # Arguments: None # Result: read description -# If cannot download, then print message and exit +# If cannot upload, then print message and exit ################################################### _start() { job="${1:-install}" - [ "${job}" = install ] && mkdir -p "${INFO_PATH}" && _print_center "justify" 'Installing google-drive-upload..' "-" + [ "${job}" = install ] && { + mkdir -p "${INSTALL_PATH}" + _print_center "justify" 'Installing google-drive-upload..' "-" + } _print_center "justify" "Fetching latest version info.." "-" LATEST_CURRENT_SHA="$(_get_latest_sha "${TYPE}" "${TYPE_VALUE}" "${REPO}")" @@ -539,66 +410,46 @@ _start() { } _print_center "justify" "Downloading scripts.." "-" - if _download_files; then - _inject_values || { "${QUIET:-_print_center}" "normal" "Cannot edit installed files" ", check if create a issue on github with proper log." 
"=" && exit 1; } - - chmod "a-w-r-x,${PERM_MODE:-u}+x+r" "${INSTALL_PATH}/${COMMAND_NAME}" - [ -z "${SKIP_SYNC}" ] && chmod "a-w-r-x,${PERM_MODE:-u}+x+r" "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" - chmod -f +w "${CONFIG_INFO}" && printf "%s\n" "CONFIG=\"${CONFIG}\"" >| "${CONFIG_INFO}" && chmod "a-w-r-x,u+r" "${CONFIG_INFO}" - - [ "${GLOBAL_INSTALL}" = false ] && { - _PATH="PATH=\"${INSTALL_PATH}:\${PATH}\"" - grep -q "${_PATH}" "${SHELL_RC}" 2>| /dev/null || { - (printf "\n%s\n" "${_PATH}" >> "${SHELL_RC}") 2>| /dev/null || { - shell_rc_write="error" - _shell_rc_err_msg() { - "${QUIET:-_print_center}" "normal" " Cannot edit SHELL RC file " "=" && printf "\n" - "${QUIET:-_print_center}" "normal" " ${SHELL_RC} " " " && printf "\n" - "${QUIET:-_print_center}" "normal" " Add below line to your shell rc manually " "-" && printf "\n" - "${QUIET:-_print_center}" "normal" "${_PATH}" " " && printf "\n" - } - } - } - } + _download_file + status_download_file="${?}" + if [ "${status_download_file}" = 0 ]; then + if ! _inject_values; then + "${QUIET:-_print_center}" "normal" "Cannot edit installed files" ", check if create a issue on github with proper log." 
"=" + exit 1 + fi + + chmod "a-w-r-x,${PERM_MODE:-u}+x+r" -- "${INSTALL_PATH}/${COMMAND_NAME}" for _ in 1 2; do _clear_line 1; done if [ "${job}" = install ]; then - { [ -n "${shell_rc_write}" ] && _shell_rc_err_msg; } || { - "${QUIET:-_print_center}" "justify" "Installed Successfully" "=" - "${QUIET:-_print_center}" "normal" "[ Command name: ${COMMAND_NAME} ]" "=" - [ -z "${SKIP_SYNC}" ] && "${QUIET:-_print_center}" "normal" "[ Sync command name: ${SYNC_COMMAND_NAME} ]" "=" + "${QUIET:-_print_center}" "justify" "Installed Successfully" "=" + "${QUIET:-_print_center}" "normal" "[ Command name: ${COMMAND_NAME} ]" "=" + + [ "${GLOBAL_INSTALL}" = false ] && { + "${QUIET:-_print_center}" "normal" " Add below line to your shell rc manually " "-" && printf "\n" + "${QUIET:-_print_center}" "normal" "${INSTALL_RC_STRING}" " " && printf "\n" + "${QUIET:-_print_center}" "normal" "Run below command" " " && printf "\n" + printf "%s\n\n" "echo '${INSTALL_RC_STRING}' >> ${SHELL_RC}" } + _print_center "justify" "To use the command, do" "-" _newline "\n" && _print_center "normal" ". ${SHELL_RC}" " " _print_center "normal" "or" " " _print_center "normal" "restart your terminal." " " _newline "\n" && _print_center "normal" "To update the script in future, just run ${COMMAND_NAME} -u/--update." " " else - { [ -n "${shell_rc_write}" ] && _shell_rc_err_msg; } || - "${QUIET:-_print_center}" "justify" 'Successfully Updated.' "=" + "${QUIET:-_print_center}" "justify" 'Successfully Updated.' "=" fi - [ -n "${OLD_INSTALLATION_PRESENT}" ] && { - rm -f "${INFO_PATH}/bin/common-utils.${INSTALLATION}" \ - "${INFO_PATH}/bin/drive-utils.${INSTALLATION}" \ - "${INFO_PATH}/google-drive-upload.info" \ - "${INFO_PATH}/google-drive-upload.binpath" - - __bak="${INFO_PATH}/google-drive-upload.binpath" - { grep -qE "(.|source) ${INFO_PATH}" "${SHELL_RC}" 2>| /dev/null && - ! { [ -w "${SHELL_RC}" ] && - _new_rc="$(sed -e "s|. 
${__bak}||g" -e "s|source ${__bak}||g" "${SHELL_RC}")" && printf "%s\n" "${_new_rc}" >| "${SHELL_RC}"; } && - { - "${QUIET:-_print_center}" "normal" " Successfully updated but manually need to remove below from ${SHELL_RC} " "=" && printf "\n" - "${QUIET:-_print_center}" "normal" " ${SHELL_RC} " " " && printf "\n" - "${QUIET:-_print_center}" "normal" ". ${INFO_PATH}" " " && printf "\n" - }; } || : - } - else _clear_line 1 - "${QUIET:-_print_center}" "justify" "Cannot download the scripts." "=" + + if [ "${status_download_file}" = 1 ]; then + "${QUIET:-_print_center}" "justify" "Cannot download the scripts." "=" + else + printf "%s\n" "Script downloaded but malformed, try again and if the issue persists open an issue on github." + fi exit 1 fi return 0 @@ -606,79 +457,42 @@ _start() { ################################################### # Uninstall the script -# Globals: 6 variables, 2 functions -# Variables - INSTALL_PATH, INFO_PATH, SKIP_SYNC, GLOBAL_INSTALL, COMMAND_NAME, SHELL_RC -# Functions - _print_center, _clear_line -# Arguments: 1 ( optional ) -# ${1} = minimal - will remove if old method present in shell rc even gupload is not installed +# Arguments: None # Result: read description -# If cannot edit the SHELL_RC, then print message and exit -# Kill all sync jobs that are running +# If cannot edit the SHELL_RC if required, then print message and exit ################################################### _uninstall() { _print_center "justify" "Uninstalling.." 
"-" - # Kill all sync jobs and remove sync folder - [ -z "${SKIP_SYNC}" ] && command -v "${SYNC_COMMAND_NAME}" 2>| /dev/null 1>&2 && { - "${SYNC_COMMAND_NAME}" -k all 2>| /dev/null 1>&2 || : - chmod -f +w "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" - rm -rf "${INFO_PATH:?}"/sync "${INSTALL_PATH:?}/${SYNC_COMMAND_NAME:?}" - } - _PATH="PATH=\"${INSTALL_PATH}:\${PATH}\"" + chmod -f u+w -- "${INSTALL_PATH}/${COMMAND_NAME}" + rm -f -- "${INSTALL_PATH:?}/${COMMAND_NAME:?}" - _error_message() { - "${QUIET:-_print_center}" "justify" 'Error: Uninstall failed.' "=" - "${QUIET:-_print_center}" "normal" " Cannot edit SHELL RC file " "=" && printf "\n" - "${QUIET:-_print_center}" "normal" " ${SHELL_RC} " " " && printf "\n" - "${QUIET:-_print_center}" "normal" " Remove below line from your shell rc manually " "-" && printf "\n" - "${QUIET:-_print_center}" "normal" " ${1}" " " && printf "\n" - return 1 + [ -z "${SKIP_SYNC}" ] && command -v "${SYNC_COMMAND_NAME}" 2>| /dev/null 1>&2 && { + chmod -f u+w -- "${INSTALL_PATH}/${SYNC_COMMAND_NAME}" + rm -f -- "${INSTALL_PATH:?}/${SYNC_COMMAND_NAME:?}" } - [ "${GLOBAL_INSTALL}" = false ] && { - { grep -q "${_PATH}" "${SHELL_RC}" 2>| /dev/null && - ! { [ -w "${SHELL_RC}" ] && - _new_rc="$(sed -e "s|${_PATH}||g" "${SHELL_RC}")" && printf "%s\n" "${_new_rc}" >| "${SHELL_RC}"; } && - _error_message "${_PATH}"; } || : - } + [ "${GLOBAL_INSTALL}" = false ] && [ -z "$(find "${INSTALL_PATH}" -type f 2>| /dev/null)" ] && rm -rf -- "${INSTALL_PATH:?}" + [ -z "$(find "${INFO_PATH}" -type f 2>| /dev/null)" ] && rm -rf -- "${INFO_PATH:?}" - # just in case old method was present - [ -n "${OLD_INSTALLATION_PRESENT}" ] && { - rm -f "${INFO_PATH}/bin/common-utils.${INSTALLATION}" \ - "${INFO_PATH}/bin/drive-utils.${INSTALLATION}" \ - "${INFO_PATH}/google-drive-upload.info" \ - "${INFO_PATH}/google-drive-upload.binpath" - - __bak="${INFO_PATH}/google-drive-upload.binpath" - { grep -qE "(.|source) ${INFO_PATH}" "${SHELL_RC}" 2>| /dev/null && - ! 
{ [ -w "${SHELL_RC}" ] && - _new_rc="$(sed -e "s|. ${__bak}||g" -e "s|source ${__bak}||g" "${SHELL_RC}")" && printf "%s\n" "${_new_rc}" >| "${SHELL_RC}"; } && - _error_message ". ${INFO_PATH}"; } || : + [ "${GLOBAL_INSTALL}" = false ] && { + "${QUIET:-_print_center}" "normal" " Remove below line from your shell rc manually " "-" && printf "\n" + "${QUIET:-_print_center}" "normal" " Shell rc: ${SHELL_RC} " " " && printf "\n" + printf "%s\n\n" "${INSTALL_RC_STRING}" } - chmod -f +w "${INSTALL_PATH}/${COMMAND_NAME}" "${INFO_PATH}/google-drive-upload.configpath" - rm -f "${INSTALL_PATH:?}/${COMMAND_NAME:?}" "${INFO_PATH}/google-drive-upload.configpath" - - [ "${GLOBAL_INSTALL}" = false ] && [ -z "$(find "${INSTALL_PATH}" -type f)" ] && rm -rf "${INSTALL_PATH:?}" - [ -z "$(find "${INFO_PATH}" -type f)" ] && rm -rf "${INFO_PATH:?}" - - _clear_line 1 _print_center "justify" "Uninstall complete." "=" return 0 } ################################################### # Process all arguments given to the script -# Globals: 1 variable -# Variable - SHELL_RC # Arguments: Many # ${@} = Flags with arguments # Result: read description # If no shell rc file found, then print message and exit ################################################### _setup_arguments() { - unset OLD_INSTALLATION_PRESENT - _check_longoptions() { [ -z "${2}" ] && printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && @@ -700,20 +514,12 @@ _setup_arguments() { -c | --cmd) _check_longoptions "${1}" "${2}" COMMAND_NAME="${2}" && shift - case "${2}" in - sync*) SYNC_COMMAND_NAME="${2##sync=}" && shift ;; - esac ;; - -B | --branch) + -b | -B | --branch) _check_longoptions "${1}" "${2}" TYPE_VALUE="${2}" && shift TYPE=branch ;; - -R | --release) - _check_longoptions "${1}" "${2}" - TYPE_VALUE="${2}" && shift - TYPE=release - ;; -s | --shell-rc) _check_longoptions "${1}" "${2}" SHELL_RC="${2}" && shift @@ -728,11 +534,9 @@ _setup_arguments() { fi ;; 
--sh | --posix) INSTALLATION="sh" ;; - -q | --quiet) QUIET="_print_center_quiet" ;; + -q | --quiet) QUIET="_print_quiet" ;; --skip-internet-check) SKIP_INTERNET_CHECK=":" ;; - -U | --uninstall) - UNINSTALL="true" - ;; + -U | --uninstall) UNINSTALL="true" ;; -D | --debug) DEBUG="true" && export DEBUG ;; *) printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 ;; esac @@ -742,15 +546,17 @@ _setup_arguments() { # 86400 secs = 1 day AUTO_UPDATE_INTERVAL="${AUTO_UPDATE_INTERVAL:-432000}" - [ -z "${SHELL_RC}" ] && printf "No default shell file found, use -s/--shell-rc to use custom rc file\n" && exit 1 - INSTALL_PATH="${_INSTALL_PATH:-${INSTALL_PATH}}" mkdir -p "${INSTALL_PATH}" 2> /dev/null || : INSTALL_PATH="$(cd "${INSTALL_PATH%\/*}" && pwd)/${INSTALL_PATH##*\/}" || exit 1 { printf "%s\n" "${PATH}" | grep -q -e "${INSTALL_PATH}:" -e "${INSTALL_PATH}/:" && IN_PATH="true"; } || : + # modify install string literal if path changed + if [ -n "${_INSTALL_PATH}" ]; then + INSTALL_RC_STRING="[ -f \"${INSTALL_PATH}/${COMMAND_NAME}\" ] && [ -x \"${INSTALL_PATH}/${COMMAND_NAME}\" ] && PATH=\"${INSTALL_PATH}:\${PATH}\"" + fi # check if install path outside home dir and running as root - [ -n "${INSTALL_PATH##${HOME}*}" ] && PERM_MODE="a" && GLOBAL_INSTALL="true" && ! [ "$(id -u)" = 0 ] && + [ -n "${INSTALL_PATH##"${HOME}"*}" ] && PERM_MODE="a" && GLOBAL_INSTALL="true" && ! [ "$(id -u)" = 0 ] && printf "%s\n" "Error: Need root access to run the script for given install path ( ${INSTALL_PATH} )." 
&& exit 1 # global dir must be in executable path @@ -763,22 +569,17 @@ _setup_arguments() { } main() { - { command -v bash && [ "$(bash -c 'printf "%s\n" ${BASH_VERSINFO:-0}')" -ge 4 ] && INSTALLATION="bash"; } 2>| /dev/null 1>&2 + { command -v bash && [ "$(bash -c 'printf "%s\n" ${BASH_VERSINFO:-0}')" -ge 4 ] && INSTALLATION="bash"; } 1>| /dev/null _check_dependencies "${?}" && INSTALLATION="${INSTALLATION:-sh}" - set -o errexit -o noclobber + set -o noclobber _variables && _setup_arguments "${@}" _check_existing_command() { if COMMAND_PATH="$(command -v "${COMMAND_NAME}")"; then - if [ -f "${INFO_PATH}/google-drive-upload.info" ] && [ -f "${INFO_PATH}/google-drive-upload.binpath" ] && [ -f "${INFO_PATH}/google-drive-upload.configpath" ]; then - OLD_INSTALLATION_PRESENT="true" && . "${INFO_PATH}/google-drive-upload.info" - CONFIG="$(cat "${CONFIG_INFO}")" - return 0 - elif SCRIPT_VALUES="$(grep -E "${VALUES_REGEX}|^LATEST_INSTALLED_SHA=\".*\".* # added values|^SELF_SOURCE=\".*\"" "${COMMAND_PATH}" || :)" && + if SCRIPT_VALUES="$(grep -E "${VALUES_REGEX}|^LATEST_INSTALLED_SHA=\".*\".* # added values|^SELF_SOURCE=\".*\"" -- "${COMMAND_PATH}" || :)" && eval "${SCRIPT_VALUES}" 2> /dev/null && [ -n "${LATEST_INSTALLED_SHA:+${SELF_SOURCE}}" ]; then - [ -f "${CONFIG_INFO}" ] && . "${CONFIG_INFO}" return 0 else printf "%s\n" "Error: Cannot validate existing installation, make sure no other program is installed as ${COMMAND_NAME}." @@ -791,12 +592,12 @@ main() { fi } - trap '' TSTP # ignore ctrl + z - if [ -n "${UNINSTALL}" ]; then - { _check_existing_command && _uninstall; } || - { "${QUIET:-_print_center}" "justify" "google-drive-upload is not installed." "="; } - exit 0 + { _check_existing_command && _uninstall; } || { + _uninstall 2>| /dev/null 1>&2 || : + "${QUIET:-_print_center}" "justify" "google-drive-upload is not installed." 
"=" + exit 0 + } else "${SKIP_INTERNET_CHECK:-_check_internet}" { _check_existing_command && _start update; } || { diff --git a/merge.sh b/merge.sh deleted file mode 100755 index 7dd950f..0000000 --- a/merge.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env sh - -set -e - -command -v shfmt 1>| /dev/null && ./format.sh && printf "\n" - -_merge() ( - shell="${1:?Error: give folder name.}" - - cd "${shell}" 2>| /dev/null 1>&2 || exit 1 - mkdir -p release - - for file in upload sync; do - { - sed -n 1p "${file}.${shell}" - printf "%s\n" "SELF_SOURCE=\"true\"" - sed 1d common-utils."${shell}" - [ "${file}" = upload ] && - sed 1d auth-utils."${shell}" && - sed 1d drive-utils."${shell}" && - sed 1d upload-utils."${shell}" - sed 1d "${file}.${shell}" - } >| "release/g${file}" - chmod +x "release/g${file}" - done - - printf "%s\n" "${shell} done." -) - -_merge sh -_merge bash diff --git a/release.sh b/release.sh new file mode 100644 index 0000000..972fb45 --- /dev/null +++ b/release.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env sh + +set -e + +sh format_and_lint.sh + +printf "Merging Scripts and minifying...\n" + +_PARENT_DIR="${PWD}" + +cd src || exit 1 + +_merge() ( + shell="${1:?Error: give folder name.}" + { [ "${shell}" = "sh" ] && flag="-p"; } || flag="" + + mkdir -p "${_PARENT_DIR}/release/${shell}" + release_path="${_PARENT_DIR}/release/${shell}" + + # gupload + { + sed -n 1p "upload.${shell}" + printf "%s\n" 'SELF_SOURCE="true"' + # shellcheck disable=SC2086 + { + # this is to export the functions so that can used in parallel functions + echo 'set -a' + sed 1d "${shell}/common-utils.${shell}" + for script in \ + update.sh \ + parser.sh \ + upload-flags.sh \ + auth-utils.sh \ + common-utils.sh \ + drive-utils.sh \ + upload-utils.sh \ + upload-common.sh; do + sed 1d "common/${script}" + done + echo 'set +a' + sed 1d "upload.${shell}" + } | shfmt -mn ${flag} + } >| "${release_path}/gupload" + chmod +x "${release_path}/gupload" + + printf "%s\n" "${release_path}/gupload done." 
+ + # gsync + { + sed -n 1p "sync.${shell}" + printf "%s\n" 'SELF_SOURCE="true"' + # shellcheck disable=SC2086 + { + # this is to export the functions so that can used in parallel functions + echo 'set -a' + sed 1d "${shell}/common-utils.${shell}" + for script in \ + parser.sh \ + sync-flags.sh \ + common-utils.sh; do + sed 1d "common/${script}" + done + echo 'set +a' + sed 1d "sync.${shell}" + } | shfmt -mn ${flag} + } >| "${release_path}/gsync" + chmod +x "${release_path}/gsync" + + printf "%s\n" "${release_path}/gsync done." +) + +_merge sh +_merge bash diff --git a/release/bash/gsync b/release/bash/gsync new file mode 100755 index 0000000..321736c --- /dev/null +++ b/release/bash/gsync @@ -0,0 +1,985 @@ +#!/usr/bin/env bash +SELF_SOURCE="true" +set -a +_assert_regex(){ +declare pattern="${1:?Error: Missing pattern}" string="${2:?Missing string}" +if [[ $string =~ $pattern ]];then +return 0 +else +return 1 +fi +} +cat(){ +for file in "$@";do +printf "%s\n" "$(<"$file")" +done +} +_count(){ +mapfile -tn 0 lines +printf '%s\n' "${#lines[@]}" +} +_epoch(){ +printf '%(%s)T\n' "-1" +} +_required_column_size(){ +shopt -s checkwinsize&&(:&&:) +if [[ $COLUMNS -gt 45 ]];then +trap 'shopt -s checkwinsize; (:;:)' SIGWINCH +return 0 +else +return 1 +fi +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)export "${2:?}=${!3}";; +*)return 1 +esac +} +_trim(){ +declare char="$1" str="$2" var="$3" +if [[ -n $var ]];then +_set_value d "$var" "${str//$char/}" +else +printf "%s" "${str//$char/}" +fi +} +_url_encode(){ +declare LC_ALL=C +for ((i=0; i<${#1}; i++));do +: "${1:i:1}" +case "$_" in +[a-zA-Z0-9.~_-])printf '%s' "$_" +;; +*)printf '%%%02X' "'$_" +esac +done 2>|/dev/null +printf '\n' +} +_is_fd_open(){ +for fd in ${1:?};do +if ! { true >&"$fd";} 2<>/dev/null;then +printf "%s\n" "Error: fd $fd not open." 
+return 1 +fi +done +} +_parser_add_help(){ +_PARSER_ALL_HELP="$_PARSER_ALL_HELP +${__PARSER_BAR:-} +${1:-}" 2>|/dev/null +} +_parser_check_arguments(){ +nargs_parser_check_arguments="$((${1:?_parser_check_arguments}))" +num_parser_check_arguments=$(($#-2)) +[ "$num_parser_check_arguments" -lt "$nargs_parser_check_arguments" ]&&{ +printf "%s\n" "${0##*/}: $2: flag requires $nargs_parser_check_arguments argument." +printf "\n%s\n" "Help:" +printf "%s\n" "$(_usage "$2")" +exit 1 +} +return 0 +} +_flag_exists(){ +tmp_flag_exists="" option_flag_exists="" +_flag_help "${1:?}" tmp_flag_exists option_flag_exists +[ -z "$tmp_flag_exists" ]&&return 1 +_set_value d "${2:?}" "$option_flag_exists" +} +_flag_help(){ +flag_flag_help="" +_trim "-" "${1:?_flag_help}" flag_flag_help +_set_value i "${2:?_flag_help}" "_parser__help_$flag_flag_help" +_set_value d "${3:-_}" "$flag_flag_help" +} +_parse_arguments(){ +__NEWLINE=" +" +_parse_support_ansi_escapes(){ +case "$TERM" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*){ [ -t 2 ]&&return 0;}||return 1;; +*): +esac +{ [ -t 2 ]&&return 0;}||return 1 +} +_parser_required_column_size(){ +COLUMNS="$({ command -v bash 1>|/dev/null&&bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1';}||{ command -v zsh 1>|/dev/null&&zsh -c 'printf "%s\n" "${COLUMNS}"';}||{ command -v stty 1>|/dev/null&&_tmp="$(stty size)"&&printf "%s\n" "${_tmp##* }";}||{ command -v tput 1>|/dev/null&&tput cols;})"||: +[ "$((COLUMNS))" -gt 45 ]&&return 0 +} +_parse_support_ansi_escapes&&_parser_required_column_size&&__PARSER_BAR="$(\ +filler='' \ +symbol='_' +i=1&&while [ "$i" -le "$COLUMNS" ];do +filler="$filler$symbol"&&i="$((i+1))" +done +printf "%s\n" "$filler")" +__PARSER_BAR="${__PARSER_BAR:+$__PARSER_BAR$__NEWLINE}" +unset _PARSER_ALL_HELP _PARSER_ARGS_SHIFT _PARSER_PREPROCESS_FUNCTION +unset _PARSER_FLAGS _PARSER_CURRENT_FLAGS _PARSER_CURRENT_NARGS _PARSER_CURRENT_ARGS _PARSER_CURRENT_ARGS_TYPE +"${1:?_parse_arguments - 1: Missing funtion name 
to setup flags}"||return 1 +shift 2>|/dev/null +_parser_run_preprocess||return 1 +while [ "$#" -gt 0 ];do +case "$1" in +''):;; +--)shift +while [ "$#" -gt 0 ];do +_parser_process_input "$@"||return 1 +shift +done +;; +-*)\ +flag_parse_arguments="" +if _flag_exists "$1" flag_parse_arguments;then +"_parser_process_$flag_parse_arguments" "$@"||return 1 +else +printf "%s\n\n" "${0##*/}: $1: Unknown option" +_short_help +fi +;; +*)_parser_process_input "$@"||return 1 +esac +_PARSER_ARGS_SHIFT="$((_PARSER_ARGS_SHIFT+1))" +shift "$_PARSER_ARGS_SHIFT" +_PARSER_ARGS_SHIFT="0" +done +return 0 +} +_parser_setup_flag(){ +_PARSER_CURRENT_FLAGS="" tmp_parser_setup_flag="" +_PARSER_FLAGS="${1:?_parser_setup_flag}" +for f in $_PARSER_FLAGS;do +_trim "-" "$f" tmp_parser_setup_flag +_PARSER_CURRENT_FLAGS="$_PARSER_CURRENT_FLAGS $tmp_parser_setup_flag" +done +_PARSER_CURRENT_NARGS="${2:?_parser_setup_flag}" +_PARSER_CURRENT_ARGS_TYPE="$3" +_PARSER_CURRENT_ARGS="$4" +} +_parser_setup_flag_help(){ +flags_parser_setup_flag_help="${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_help}" +nargs_parser_setup_flag_help="${_PARSER_CURRENT_NARGS:?_parser_setup_flag_help}" +unset start_parser_setup_flag_help \ +help_parser_setup_flag_help \ +arg_parser_setup_flag_help \ +all_parser_setup_flag_help +while IFS= read -r line <&4;do +help_parser_setup_flag_help="$help_parser_setup_flag_help + $line" +done 4<|/dev/null +} +_parser_setup_flag_preprocess(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_preprocess +while IFS= read -r line <&4;do +fn_parser_setup_flag_preprocess="$fn_parser_setup_flag_preprocess +$line" +done +_PARSER_PREPROCESS_FUNCTION="$_PARSER_PREPROCESS_FUNCTION +$fn_parser_setup_flag_preprocess" +} +_parser_setup_flag_process(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_process +if [ "${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process}" -gt 0 ]&&! 
[ "$_PARSER_CURRENT_ARGS_TYPE" = optional ];then +fn_parser_setup_flag_process="_parser_check_arguments ${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process} \"\${@}\"" +fi +while IFS= read -r line <&4;do +fn_parser_setup_flag_process="$fn_parser_setup_flag_process +$line" +done +for f in ${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_process};do +eval "_parser_process_$f() { $fn_parser_setup_flag_process ; }" +done +} +_parser_run_preprocess(){ +eval "_parser_preprocess_setup() { ${_PARSER_PREPROCESS_FUNCTION:-:} ; }"&&_parser_preprocess_setup +} +_parser_shift(){ +export _PARSER_ARGS_SHIFT="${1:-1}" +} +_short_help(){ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)eval export "$2"=\"\$"$3"\";; +*)return 1 +esac +} +_trim(){ +char_trim="$1" str_trim="$2" var_trim="$3" +set -f +old_ifs="$IFS" +IFS="$char_trim" +set -- $str_trim +IFS= +if [ -n "$var_trim" ];then +_set_value d "$var_trim" "$*" +else +printf "%s" "$*" +fi +IFS="$old_ifs" +set +f +} +_bytes_to_human(){ +b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0 +d_bytes_to_human='' type_bytes_to_human='' +while [ "$b_bytes_to_human" -gt 1024 ];do +d_bytes_to_human="$(printf ".%02d" $((b_bytes_to_human%1024*100/1024)))" +b_bytes_to_human=$((b_bytes_to_human/1024))&&s_bytes_to_human=$((s_bytes_to_human+=1)) +done +j=0&&for i in B KB MB GB TB PB EB YB ZB;do +j="$((j+=1))"&&[ "$((j-1))" = "$s_bytes_to_human" ]&&type_bytes_to_human="$i"&&break +continue +done +printf "%s\n" "$b_bytes_to_human$d_bytes_to_human $type_bytes_to_human" +} +_check_debug(){ +export DEBUG QUIET +if [ -n "$DEBUG" ];then +set -x&&PS4='-> ' +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";};} +_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +else +if [ -z "$QUIET" ];then +if _support_ansi_escapes;then +if ! 
_required_column_size;then +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +fi +export EXTRA_LOG="_print_center" CURL_PROGRESS="-#" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" +else +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +_clear_line(){ :;}&&_move_cursor(){ :;} +fi +_newline(){ printf "%b" "$1";} +else +_print_center(){ :;}&&_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +fi +set +x +fi +} +_check_internet(){ +"${EXTRA_LOG:-}" "justify" "Checking Internet Connection.." "-" +if ! _timeout 10 curl -Is google.com --compressed;then +_clear_line 1 +"${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" +return 1 +fi +_clear_line 1 +} +_clear_line(){ +printf "\033[%sA\033[2K" "$1" +} +_dirname(){ +dir_dirname="${1:-.}" +dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}"&&[ -n "${dir_dirname##*/*}" ]&&dir_dirname=. +dir_dirname="${dir_dirname%/*}"&&dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" +printf '%s\n' "${dir_dirname:-/}" +} +_display_time(){ +t_display_time="$1" day_display_time="$((t_display_time/60/60/24))" +hr_display_time="$((t_display_time/60/60%24))" min_display_time="$((t_display_time/60%60))" sec_display_time="$((t_display_time%60))" +[ "$day_display_time" -gt 0 ]&&printf '%d days ' "$day_display_time" +[ "$hr_display_time" -gt 0 ]&&printf '%d hrs ' "$hr_display_time" +[ "$min_display_time" -gt 0 ]&&printf '%d minute(s) ' "$min_display_time" +[ "$day_display_time" -gt 0 ]||[ "$hr_display_time" -gt 0 ]||[ "$min_display_time" -gt 0 ]&&printf 'and ' +printf '%d seconds\n' "$sec_display_time" +} +_get_latest_sha(){ +export TYPE TYPE_VALUE REPO +unset latest_sha_get_latest_sha raw_get_latest_sha +case "${1:-$TYPE}" in +branch)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl --compressed -s https://github.com/"${3:-$REPO}"/commits/"${2:-$TYPE_VALUE}".atom -r 0-2000)" +_tmp="$(printf "%s\n" 
"$raw_get_latest_sha"|grep -o 'Commit\/.*<' -m1||:)"&&_tmp="${_tmp##*\/}"&&printf "%s\n" "${_tmp%%<*}")" +;; +release)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl -L --compressed -s https://github.com/"${3:-$REPO}"/releases/"${2:-$TYPE_VALUE}")" +_tmp="$(printf "%s\n" "$raw_get_latest_sha"|grep '="/'"${3:-$REPO}""/commit" -m1||:)"&&_tmp="${_tmp##*commit\/}"&&printf "%s\n" "${_tmp%%\"*}")" +;; +*): +esac +printf "%b" "${latest_sha_get_latest_sha:+$latest_sha_get_latest_sha\n}" +} +_json_escape(){ +mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" +if [ "$mode_json_escape" = "j" ];then +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s|\\\|\\\\\\\|g" \ +-e "s|\/|\\\/|g" \ +-e 's/\"/\\\"/g' \ +-e "s/$(printf '\t')/\\t/g" \ +-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +else +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s/$(printf '\t')/\\t/g" \ +-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +fi +output_json_escape="$(printf "%s" "$output_json_escape"|awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" +printf "%s" "$output_json_escape" +} +_json_value(){ +{ [ "$2" -gt 0 ] 2>|/dev/null&&no_of_lines_json_value="$2";}||: +{ [ "$3" -gt 0 ] 2>|/dev/null&&num_json_value="$3";}||{ ! [ "$3" = all ]&&num_json_value=1;} +_tmp="$(grep -o "\"$1\"\:.*" ${no_of_lines_json_value:+-m} $no_of_lines_json_value)"||return 1 +printf "%s\n" "$_tmp"|sed -e 's|.*"'"$1""\":||" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "$num_json_value"p||: +return 0 +} +_parse_config(){ +_config_file_parse_config="${1:?Error: Profile config file}" +print_parse_config="${2:-false}" +[ -r "$_config_file_parse_config" ]||{ +printf "%s\n" "Error: Given config file ( $_config_file_parse_config ) is not readable." 
+return 1 +} +while IFS='=' read -r key val;do +{ [ -n "$key" ]&&[ -n "$val" ]&&[ -n "${key##\#*}" ];}||continue +key="${key#"${key%%[![:space:]]*}"}" +val="${val#"${val%%[![:space:]]*}"}" +key="${key%"${key##*[![:space:]]}"}" +val="${val%"${val##*[![:space:]]}"}" +case "$val" in +\"*\")val="${val#\"}" val="${val%\"}";; +\'*\')val="${val#\'}" val="${val%\'}";; +*): +esac +export "$key=$val" 2>/dev/null||printf "%s\n" "Warning: $key is not a valid variable name." +[ "$print_parse_config" = true ]&&echo "$key=$val" +done <"$_config_file_parse_config" +return 0 +} +_print_center(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +term_cols_print_center="${COLUMNS:-}" +type_print_center="$1" filler_print_center="" +case "$type_print_center" in +normal)out_print_center="$2"&&symbol_print_center="$3";; +justify)if +[ $# = 3 ] +then +input1_print_center="$2" symbol_print_center="$3" to_print_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center-5))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&out_print_center="[ $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..]";}||{ out_print_center="[ $input1_print_center ]";} +else +input1_print_center="$2" input2_print_center="$3" symbol_print_center="$4" to_print_print_center="" temp_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center*47/100))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&temp_print_center=" $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..";}||{ temp_print_center=" $input1_print_center";} +to_print_print_center="$((term_cols_print_center*46/100))" +{ [ "${#input2_print_center}" -gt "$to_print_print_center" ]&&temp_print_center="$temp_print_center$(printf "%.${to_print_print_center}s\n" "$input2_print_center").. 
";}||{ temp_print_center="$temp_print_center$input2_print_center ";} +out_print_center="[$temp_print_center]" +fi +;; +*)return 1 +esac +str_len_print_center="${#out_print_center}" +[ "$str_len_print_center" -ge "$((term_cols_print_center-1))" ]&&{ +printf "%s\n" "$out_print_center"&&return 0 +} +filler_print_center_len="$(((term_cols_print_center-str_len_print_center)/2))" +i_print_center=1&&while [ "$i_print_center" -le "$filler_print_center_len" ];do +filler_print_center="$filler_print_center$symbol_print_center"&&i_print_center="$((i_print_center+1))" +done +printf "%s%s%s" "$filler_print_center" "$out_print_center" "$filler_print_center" +[ "$(((term_cols_print_center-str_len_print_center)%2))" -ne 0 ]&&printf "%s" "$symbol_print_center" +printf "\n" +return 0 +} +_print_center_quiet(){ +{ [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";} +} +_support_ansi_escapes(){ +unset ansi_escapes +case "${TERM:-}" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*)ansi_escapes="true";; +*): +esac +{ [ -t 2 ]&&[ -n "$ansi_escapes" ]&&return 0;}||return 1 +} +_timeout(){ +timeout_timeout="${1:?Error: Specify Timeout}"&&shift +{ +"$@"& +child="$!" +trap -- "" TERM +{ +sleep "$timeout_timeout" +kill -9 "$child" +}& +wait "$child" +} 2>|/dev/null 1>&2 +} +_update_config(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +value_name_update_config="$1" value_update_config="$2" config_path_update_config="$3" +! [ -f "$config_path_update_config" ]&&: >|"$config_path_update_config" +chmod u+w -- "$config_path_update_config"||return 1 +printf "%s\n%s\n" "$(grep -v -e "^$" -e "^$value_name_update_config=" -- "$config_path_update_config"||:)" \ +"$value_name_update_config=\"$value_update_config\"" >|"$config_path_update_config"||return 1 +chmod a-w-r-x,u+r -- "$config_path_update_config"||return 1 +return 0 +} +set +a +_usage(){ +printf "%b" " +The script can be used to sync your local folder to google drive. 
+ +Utilizes google-drive-upload bash scripts.\n +Usage: ${0##*/} [options.. ]\n +Options:\n + -d | --directory - Gdrive foldername.\n + -k | --kill - to kill the background job using pid number ( -p flags ) or used with input, can be used multiple times.\n + -j | --jobs - See all background jobs that were started and still running.\n + Use --jobs v/verbose to more information for jobs.\n + -p | --pid - Specify a pid number, used for --jobs or --kill or --info flags, can be used multiple times.\n + -i | --info - See information about a specific sync using pid_number ( use -p flag ) or use with input, can be used multiple times.\n + -t | --time - Amount of time to wait before try to sync again in background.\n + To set wait time by default, use ${0##*/} -t default='3'. Replace 3 with any positive integer.\n + -l | --logs - To show the logs after starting a job or show log of existing job. Can be used with pid number ( -p flag ). + Note: If multiple pid numbers or inputs are used, then will only show log of first input as it goes on forever. + -a | --arguments - Additional arguments for gupload commands. e.g: ${0##*/} -a '-q -o -p 4 -d'.\n + To set some arguments by default, use ${0##*/} -a default='-q -o -p 4 -d'.\n + -fg | --foreground - This will run the job in foreground and show the logs.\n + -in | --include 'pattern' - Only include the files with the given pattern to upload.\n + e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n + -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading.\n + e.g: ${0##*/} local_folder --exclude "*1*", will exclude all files with pattern '1' in the name.\n + -c | --command 'command name'- Incase if gupload command installed with any other name or to use in systemd service.\n + --sync-detail-dir 'dirname' - Directory where a job information will be stored. 
+ Default: $HOME/.google-drive-upload\n + -s | --service 'service name' - To generate systemd service file to setup background jobs on boot.\n + -D | --debug - Display script command trace, use before all the flags to see maximum script trace.\n + -h | --help - Display usage instructions.\n" +exit 0 +} +_short_help(){ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +_check_pid(){ +{ ps -p "$1" 2>|/dev/null 1>&2&&return 0;}||return 1 +} +_get_job_info(){ +declare input local_folder pid times extra +pid="$1"&&input="${3:-$(grep "$pid" "$SYNC_LIST"||:)}" +if [[ -n $input ]];then +if times="$(ps -p "$pid" -o etimes --no-headers)";then +printf "\n%s\n" "PID: $pid" +: "${input#*"|:_//_:|"}"&&local_folder="${_%%"|:_//_:|"*}" +printf "Local Folder: %s\n" "$local_folder" +printf "Drive Folder: %s\n" "${input##*"|:_//_:|"}" +printf "Running Since: %s\n" "$(_display_time "$times")" +[[ -n $2 ]]&&{ +extra="$(ps -p "$pid" -o %cpu,%mem --no-headers||:)" +printf "CPU usage:%s\n" "${extra% *}" +printf "Memory usage: %s\n" "${extra##* }" +_setup_loop_variables "$local_folder" "${input##*"|:_//_:|"}" +printf "Success: %s\n" "$(_count <"$SUCCESS_LOG")" +printf "Failed: %s\n" "$(_count <"$ERROR_LOG")" +} +RETURN_STATUS=0 +else +RETURN_STATUS=1 +fi +else +RETURN_STATUS=11 +fi +return 0 +} +_remove_job(){ +declare pid="$1" input local_folder drive_folder new_list +input="$(grep "$pid" "$SYNC_LIST"||:)" +if [ -n "$pid" ];then +: "${input##*"|:_//_:|"}"&&local_folder="${_%%"|:_//_:|"*}" +drive_folder="${input##*"|:_//_:|"}" +new_list="$(grep -v "$pid" "$SYNC_LIST"||:)" +printf "%s\n" "$new_list" >|"$SYNC_LIST" +fi +rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-$2}${local_folder_remove_job:-$3}" +{ [[ -z $(find "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-$2}" -type f||:) ]]&&rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-$2}";} 2>|/dev/null 1>&2 +return 0 +} +_kill_job(){ +declare pid="$1" +kill -9 "$pid" 2>|/dev/null 1>&2||: 
+_remove_job "$pid" +printf "Killed.\n" +} +_show_jobs(){ +declare list pid total=0 +list="$(grep -v '^$' "$SYNC_LIST"||:)" +printf "%s\n" "$list" >|"$SYNC_LIST" +while read -r -u 4 line;do +if [[ -n $line ]];then +: "${line%%"|:_//_:|"*}"&&pid="${_##*: }" +_get_job_info "$pid" "$1" "$line" +{ [[ $RETURN_STATUS == 1 ]]&&_remove_job "$pid";}||{ ((total+=1))&&no_task="printf";} +fi +done 4<"$SYNC_LIST" +printf "\nTotal Jobs Running: %s\n" "$total" +[[ -z $1 ]]&&"${no_task:-:}" "For more info: %s -j/--jobs v/verbose\n" "${0##*/}" +return 0 +} +_setup_loop_variables(){ +declare folder="$1" drive_folder="$2" +DIRECTORY="$SYNC_DETAIL_DIR/$drive_folder$folder" +PID_FILE="$DIRECTORY/pid" +SUCCESS_LOG="$DIRECTORY/success_list" +ERROR_LOG="$DIRECTORY/failed_list" +LOGS="$DIRECTORY/logs" +} +_setup_loop_files(){ +mkdir -p "$DIRECTORY" +for file in PID_FILE SUCCESS_LOG ERROR_LOG;do +printf "" >>"${!file}" +done +PID="$(<"$PID_FILE")" +} +_check_and_upload(){ +declare all initial new_files new_file +mapfile -t initial <"$SUCCESS_LOG" +mapfile -t all <<<"$(printf "%s\n%s\n" "$(<"$SUCCESS_LOG")" "$(<"$ERROR_LOG")")" +[[ $(printf "%b\n" ./*) == "./*" ]]&&return 0 +all+=(*) +{ [ -n "$INCLUDE_FILES" ]&&mapfile -t all <<<"$(printf "%s\n" "${all[@]}"|grep -E $INCLUDE_FILES)";}||: +mapfile -t new_files <<<"$(eval grep -vxEf <(printf "%s\n" "${initial[@]}") <(printf "%s\n" "${all[@]}") $EXCLUDE_FILES||:)" +[[ -n ${new_files[*]} ]]&&printf "" >|"$ERROR_LOG"&&{ +declare -A Aseen&&for new_file in "${new_files[@]}";do +{ [[ ${Aseen[new_file]} ]]&&continue;}||Aseen[$new_file]=x +if eval "\"$COMMAND_PATH\"" "\"$new_file\"" "$ARGS";then +printf "%s\n" "$new_file" >>"$SUCCESS_LOG" +else +printf "%s\n" "$new_file" >>"$ERROR_LOG" +printf "%s\n" "Error: Input - $new_file" +fi +printf "\n" +done +} +return 0 +} +_loop(){ +while :;do +_check_and_upload +sleep "$SYNC_TIME_TO_SLEEP" +done +} +_check_existing_loop(){ +_setup_loop_variables "$FOLDER" "$GDRIVE_FOLDER" +_setup_loop_files +if [[ -z $PID 
]];then +RETURN_STATUS=0 +elif _check_pid "$PID";then +RETURN_STATUS=1 +else +_remove_job "$PID" +_setup_loop_variables "$FOLDER" "$GDRIVE_FOLDER" +_setup_loop_files +RETURN_STATUS=2 +fi +return 0 +} +_start_new_loop(){ +if [[ -n $FOREGROUND ]];then +printf "%b\n" "Local Folder: $INPUT\nDrive Folder: $GDRIVE_FOLDER\n" +trap '_clear_line 1 && printf "\n" && _remove_job "" "${GDRIVE_FOLDER}" "${FOLDER}"; exit' INT TERM +trap 'printf "Job stopped.\n" ; exit' EXIT +_loop +else +(_loop &>"$LOGS")& +PID="$!" +printf "%s\n" "$PID" >|"$PID_FILE" +printf "%b\n" "Job started.\nLocal Folder: $INPUT\nDrive Folder: $GDRIVE_FOLDER" +printf "%s\n" "PID: $PID" +printf "%b\n" "PID: $PID|:_//_:|$FOLDER|:_//_:|$GDRIVE_FOLDER" >>"$SYNC_LIST" +[[ -n $SHOW_LOGS ]]&&tail -f "$LOGS" +fi +return 0 +} +_do_job(){ +case "${JOB[*]}" in +*SHOW_JOBS*)_show_jobs "${SHOW_JOBS_VERBOSE:-}" +exit +;; +*KILL_ALL*)\ +PIDS="$(_show_jobs|grep -o 'PID:.*[0-9]'|sed "s/PID: //g"||:)"&&\ +total=0 +[[ -n $PIDS ]]&&{ +for _pid in $PIDS;do +printf "PID: %s - " "${_pid##* }" +_kill_job "${_pid##* }" +((total+=1)) +done +} +printf "\nTotal Jobs Killed: %s\n" "$total" +exit +;; +*PIDS*)for pid in "${ALL_PIDS[@]}" +do +[[ $JOB_TYPE =~ INFO ]]&&{ +_get_job_info "$pid" more +[[ $RETURN_STATUS -gt 0 ]]&&{ +[[ $RETURN_STATUS == 1 ]]&&_remove_job "$pid" +printf "No job running with given PID ( %s ).\n" "$pid" 1>&2 +} +} +[[ $JOB_TYPE =~ SHOW_LOGS ]]&&{ +input="$(grep "$pid" "$SYNC_LIST"||:)" +if [[ -n $input ]];then +_check_pid "$pid"&&{ +: "${input#*"|:_//_:|"}"&&local_folder="${_/"|:_//_:|"*/}" +_setup_loop_variables "$local_folder" "${input/*"|:_//_:|"/}" +tail -f "$LOGS" +} +else +printf "No job running with given PID ( %s ).\n" "$pid" 1>&2 +fi +} +[[ $JOB_TYPE =~ KILL ]]&&{ +_get_job_info "$pid" +if [[ $RETURN_STATUS == 0 ]];then +_kill_job "$pid" +else +[[ $RETURN_STATUS == 1 ]]&&_remove_job "$pid" +printf "No job running with given PID ( %s ).\n" "$pid" 1>&2 +fi +} +done +[[ $JOB_TYPE =~ (INFO|SHOW_LOGS|KILL) 
]]&&exit 0 +esac +return 0 +} +_setup_arguments(){ +[[ $# == 0 ]]&&printf "%s: Missing arguments\n" "${FUNCNAME[0]}"&&return 1 +unset SYNC_TIME_TO_SLEEP ARGS COMMAND_NAME DEBUG GDRIVE_FOLDER KILL SHOW_LOGS +COMMAND_NAME="gupload" +_check_longoptions(){ +[[ -z $2 ]]&&printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' \ +"${0##*/}" "$1" "${0##*/}"&&exit 1 +return 0 +} +while [[ $# -gt 0 ]];do +case "$1" in +-h|--help)_usage;; +-D|--debug)DEBUG="true"&&export DEBUG&&_check_debug;; +-d|--directory)_check_longoptions "$1" "$2" +GDRIVE_FOLDER="$2"&&shift +ARGS+=" -C \"$GDRIVE_FOLDER\" " +;; +-j|--jobs)[[ $2 == v* ]]&&\ +SHOW_JOBS_VERBOSE="true"&&shift +JOB=(SHOW_JOBS) +;; +-p|--pid)_check_longoptions "$1" "$2" +if [[ $2 -gt 0 ]];then +ALL_PIDS+=("$2")&&shift +JOB+=(PIDS) +else +printf "-p/--pid only takes postive integer as arguments.\n" +exit 1 +fi +;; +-i|--info)JOB_TYPE+="INFO"&&INFO="true";; +-k|--kill)\ +JOB_TYPE+="KILL"&&\ +KILL="true" +[[ $2 == all ]]&&JOB=(KILL_ALL)&&shift +;; +-l|--logs)JOB_TYPE+="SHOW_LOGS"&&SHOW_LOGS="true";; +-t|--time)_check_longoptions "$1" "$2" +if [[ $2 -gt 0 ]];then +[[ $2 == default* ]]&&UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config" +TO_SLEEP="${2/default=/}"&&shift +else +printf "-t/--time only takes positive integers as arguments, min = 1, max = infinity.\n" +exit 1 +fi +;; +-a|--arguments)_check_longoptions "$1" "$2" +[[ $2 == default* ]]&&UPDATE_DEFAULT_ARGS="_update_config" +ARGS+="${2/default=/} "&&shift +;; +-fg|--foreground)FOREGROUND="true"&&SHOW_LOGS="true";; +-in|--include)_check_longoptions "$1" "$2" +INCLUDE_FILES="$INCLUDE_FILES -e '$2' "&&shift +;; +-ex|--exclude)_check_longoptions "$1" "$2" +EXCLUDE_FILES="$EXCLUDE_FILES -e '$2' "&&shift +;; +-c|--command)_check_longoptions "$1" "$2" +CUSTOM_COMMAND_NAME="$2"&&shift +;; +--sync-detail-dir)_check_longoptions "$1" "$2" +SYNC_DETAIL_DIR="$2"&&shift +;; +-s|--service)_check_longoptions "$1" "$2" +SERVICE_NAME="$2"&&shift 
+CREATE_SERVICE="true" +;; +*)if +[[ $1 == -* ]] +then +printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "$1" "${0##*/}"&&exit 1 +else +FINAL_INPUT_ARRAY+=("$1") +fi +esac +shift +done +INFO_PATH="$HOME/.google-drive-upload" +CONFIG_INFO="$INFO_PATH/google-drive-upload.configpath" +[[ -f $CONFIG_INFO ]]&&. "$CONFIG_INFO" +CONFIG="${CONFIG:-$HOME/.googledrive.conf}" +SYNC_DETAIL_DIR="${SYNC_DETAIL_DIR:-$INFO_PATH/sync}" +SYNC_LIST="$SYNC_DETAIL_DIR/sync_list" +mkdir -p "$SYNC_DETAIL_DIR"&&printf "" >>"$SYNC_LIST" +_do_job +[[ -z ${FINAL_INPUT_ARRAY[*]} ]]&&_short_help +return 0 +} +_config_variables(){ +COMMAND_NAME="${CUSTOM_COMMAND_NAME:-$COMMAND_NAME}" +VALUES_LIST="REPO COMMAND_NAME SYNC_COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE" +VALUES_REGEX=""&&for i in $VALUES_LIST;do +VALUES_REGEX="${VALUES_REGEX:+$VALUES_REGEX|}^$i=\".*\".* # added values" +done +{ +COMMAND_PATH="$(command -v "$COMMAND_NAME")" 1>/dev/null&&SCRIPT_VALUES="$(grep -E "$VALUES_REGEX|^SELF_SOURCE=\".*\"" "$COMMAND_PATH"||:)"&&eval "$SCRIPT_VALUES"&&[[ -n ${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+$TYPE_VALUE}}}} ]]&&: +}||{ printf "Error: %s is not installed, use -c/--command to specify.\n" "$COMMAND_NAME" 1>&2&&exit 1;} +ARGS+=" -q " +SYNC_TIME_TO_SLEEP="3" +[[ -r $CONFIG ]]&&. 
"$CONFIG" +SYNC_TIME_TO_SLEEP="${TO_SLEEP:-$SYNC_TIME_TO_SLEEP}" +ARGS+=" ${SYNC_DEFAULT_ARGS:-} " +"${UPDATE_DEFAULT_ARGS:-:}" SYNC_DEFAULT_ARGS " $ARGS " "$CONFIG" +"${UPDATE_DEFAULT_TIME_TO_SLEEP:-:}" SYNC_TIME_TO_SLEEP "$SYNC_TIME_TO_SLEEP" "$CONFIG" +return 0 +} +_systemd_service_contents(){ +declare username="${LOGNAME:?Give username}" install_path="${INSTALL_PATH:?Missing install path}" \ +cmd="${COMMAND_NAME:?Missing command name}" sync_cmd="${SYNC_COMMAND_NAME:?Missing gsync cmd name}" \ +all_argumnets="${ALL_ARGUMNETS:-}" +printf "%s\n" '# Systemd service file - start +[Unit] +Description=google-drive-upload synchronisation service +After=network.target + +[Service] +Type=simple +User='"$username"' +Restart=on-abort +RestartSec=3 +ExecStart="'"$install_path/$sync_cmd"'" --foreground --command "'"$install_path/$cmd"'" --sync-detail-dir "/tmp/sync" '"$all_argumnets"' + +# Security +PrivateTmp=true +ProtectSystem=full +NoNewPrivileges=true +ProtectControlGroups=true +ProtectKernelModules=true +ProtectKernelTunables=true +PrivateDevices=true +RestrictAddressFamilies=AF_INET AF_INET6 AF_NETLINK +RestrictNamespaces=true +RestrictRealtime=true +SystemCallArchitectures=native + +[Install] +WantedBy=multi-user.target +# Systemd service file - end' +} +_systemd_service_script(){ +declare name="${1:?Missing service name}" script_name script \ +service_file_contents="${2:?Missing service file contents}" +script_name="${3:?Missing script name}" +script='#!/usr/bin/env bash +set -e + +_usage() { + printf "%b" "# Service name: '"'$name'"' + +# Print the systemd service file contents +bash \"${0##*/}\" print\n +# Add service to systemd files ( this must be run before doing any of the below ) +bash \"${0##*/}\" add\n +# Start or Stop the service +bash \"${0##*/}\" start / stop\n +# Enable or Disable as a boot service: +bash \"${0##*/}\" enable / disable\n +# See logs +bash \"${0##*/}\" logs\n +# Remove the service from system +bash \"${0##*/}\" remove\n\n" + + _status + 
exit 0 +} + +_status() { + declare status current_status + status="$(systemctl status '"'$name'"' 2>&1 || :)" + current_status="$(printf "%s\n" "${status}" | env grep -E "●.*|(Loaded|Active|Main PID|Tasks|Memory|CPU): .*" || :)" + + printf "%s\n" "Current status of service: ${current_status:-${status}}" + return 0 +} + +unset TMPFILE + +[[ $# = 0 ]] && _usage + +CONTENTS='"'$service_file_contents'"' + +_add_service() { + declare service_file_path="/etc/systemd/system/'"$name"'.service" + printf "%s\n" "Service file path: ${service_file_path}" + if [[ -f ${service_file_path} ]]; then + printf "%s\n" "Service file already exists. Overwriting" + sudo mv "${service_file_path}" "${service_file_path}.bak" || exit 1 + printf "%s\n" "Existing service file was backed up." + printf "%s\n" "Old service file: ${service_file_path}.bak" + else + [[ -z ${TMPFILE} ]] && { + { { command -v mktemp 1>|/dev/null && TMPFILE="$(mktemp -u)"; } || + TMPFILE="${PWD}/.$(_t="$(printf "%(%s)T\\n" "-1")" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 + } + export TMPFILE + trap "exit" INT TERM + _rm_tmpfile() { rm -f "${TMPFILE:?}" ; } + trap "_rm_tmpfile" EXIT + trap "" TSTP # ignore ctrl + z + + { printf "%s\n" "${CONTENTS}" >|"${TMPFILE}" && sudo cp "${TMPFILE}" /etc/systemd/system/'"$name"'.service; } || + { printf "%s\n" "Error: Failed to add service file to system." && exit 1 ;} + fi + sudo systemctl daemon-reload || printf "%s\n" "Could not reload the systemd daemon." + printf "%s\n" "Service file was successfully added." + return 0 +} + +_service() { + declare service_name='"'$name'"' action="${1:?}" service_file_path + service_file_path="/etc/systemd/system/${service_name}.service" + printf "%s\n" "Service file path: ${service_file_path}" + [[ -f ${service_file_path} ]] || { printf "%s\n" "Service file does not exist." 
&& exit 1; } + sudo systemctl daemon-reload || exit 1 + case "${action}" in + log*) sudo journalctl -u "${service_name}" -f ;; + rm | remove) + sudo systemctl stop "${service_name}" || : + if sudo rm -f /etc/systemd/system/"${service_name}".service; then + sudo systemctl daemon-reload || : + printf "%s\n" "Service removed." && return 0 + else + printf "%s\n" "Error: Cannot remove." && exit 1 + fi + ;; + *) + declare success="${2:?}" error="${3:-}" + if sudo systemctl "${action}" "${service_name}"; then + printf "%s\n" "Success: ${service_name} ${success}." && return 0 + else + printf "%s\n" "Error: Cannot ${action} ${service_name} ${error}." && exit 1 + fi + ;; + esac + return 0 +} + +while [[ "${#}" -gt 0 ]]; do + case "${1}" in + print) printf "%s\n" "${CONTENTS}" ;; + add) _add_service ;; + start) _service start started ;; + stop) _service stop stopped ;; + enable) _service enable "boot service enabled" "boot service" ;; + disable) _service disable "boot service disabled" "boot service" ;; + logs) _service logs ;; + remove) _service rm ;; + *) printf "%s\n" "Error: No valid options provided." && _usage ;; + esac + shift +done' +printf "%s\n" "$script" >|"$script_name" +return 0 +} +_process_arguments(){ +declare current_folder&&declare -A Aseen +for INPUT in "${FINAL_INPUT_ARRAY[@]}";do +{ [[ ${Aseen[$INPUT]} ]]&&continue;}||Aseen[$INPUT]=x +! 
[[ -d $INPUT ]]&&printf "\nError: Invalid Input ( %s ), no such directory.\n" "$INPUT"&&continue +current_folder="$(pwd)" +FOLDER="$(cd "$INPUT"&&pwd)"||exit 1 +[[ -n $DEFAULT_ACCOUNT ]]&&_set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${DEFAULT_ACCOUNT}_ROOT_FOLDER_NAME" +GDRIVE_FOLDER="${GDRIVE_FOLDER:-${ROOT_FOLDER_NAME:-Unknown}}" +[[ -n $CREATE_SERVICE ]]&&{ +ALL_ARGUMNETS="\"$FOLDER\" ${TO_SLEEP:+-t \"$TO_SLEEP\"} -a \"${ARGS// / }\"" +num="${num+$((num+=1))}" +service_name="gsync-$SERVICE_NAME${num:+_$num}" +script_name="$service_name.service.sh" +_systemd_service_script "$service_name" "$(_systemd_service_contents)" "$script_name" +_print_center "normal" "=" "=" +bash "$script_name" +_print_center "normal" "=" "=" +continue +} +cd "$FOLDER"||exit 1 +_check_existing_loop +case "$RETURN_STATUS" in +0|2)_start_new_loop;; +1)printf "%b\n" "Job is already running.." +if [[ -n $INFO ]];then +_get_job_info "$PID" more "PID: $PID|:_//_:|$FOLDER|:_//_:|$GDRIVE_FOLDER" +else +printf "%b\n" "Local Folder: $INPUT\nDrive Folder: $GDRIVE_FOLDER" +printf "%s\n" "PID: $PID" +fi +[[ -n $KILL ]]&&_kill_job "$PID"&&exit +[[ -n $SHOW_LOGS ]]&&tail -f "$LOGS" +esac +cd "$current_folder"||exit 1 +done +return 0 +} +main(){ +[[ $# == 0 ]]&&_short_help +set -o noclobber -o pipefail +[[ -z $SELF_SOURCE ]]&&{ +UTILS_FOLDER="${UTILS_FOLDER:-$PWD}" +{ . "$UTILS_FOLDER"/bash/common-utils.bash&&. 
"$UTILS_FOLDER"/common/common-utils.sh;}||{ printf "Error: Unable to source util files.\n"&&exit 1;} +} +trap '' TSTP +_setup_arguments "$@" +_check_debug +_config_variables +_process_arguments +} +main "$@" diff --git a/release/bash/gupload b/release/bash/gupload new file mode 100755 index 0000000..adf5807 --- /dev/null +++ b/release/bash/gupload @@ -0,0 +1,1964 @@ +#!/usr/bin/env bash +SELF_SOURCE="true" +set -a +_assert_regex(){ +declare pattern="${1:?Error: Missing pattern}" string="${2:?Missing string}" +if [[ $string =~ $pattern ]];then +return 0 +else +return 1 +fi +} +cat(){ +for file in "$@";do +printf "%s\n" "$(<"$file")" +done +} +_count(){ +mapfile -tn 0 lines +printf '%s\n' "${#lines[@]}" +} +_epoch(){ +printf '%(%s)T\n' "-1" +} +_required_column_size(){ +shopt -s checkwinsize&&(:&&:) +if [[ $COLUMNS -gt 45 ]];then +trap 'shopt -s checkwinsize; (:;:)' SIGWINCH +return 0 +else +return 1 +fi +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)export "${2:?}=${!3}";; +*)return 1 +esac +} +_trim(){ +declare char="$1" str="$2" var="$3" +if [[ -n $var ]];then +_set_value d "$var" "${str//$char/}" +else +printf "%s" "${str//$char/}" +fi +} +_url_encode(){ +declare LC_ALL=C +for ((i=0; i<${#1}; i++));do +: "${1:i:1}" +case "$_" in +[a-zA-Z0-9.~_-])printf '%s' "$_" +;; +*)printf '%%%02X' "'$_" +esac +done 2>|/dev/null +printf '\n' +} +_auto_update(){ +export COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE REPO LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL +command -v "$COMMAND_NAME" 1>/dev/null&&if [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+$TYPE_VALUE}}}}" ];then +current_time="$(_epoch)" +[ "$((LAST_UPDATE_TIME+AUTO_UPDATE_INTERVAL))" -lt "$(_epoch)" ]&&_update update +_update_value LAST_UPDATE_TIME "$current_time" +fi +return 0 +} +_update(){ +job_update="${1:-update}" +[ "${GLOBAL_INSTALL:-}" = true ]&&! 
[ "$(id -u)" = 0 ]&&printf "%s\n" "Error: Need root access to update."&&return 0 +[ "$job_update" = uninstall ]&&job_uninstall="--uninstall" +_print_center "justify" "Fetching $job_update script.." "-" +repo_update="${REPO:-labbots/google-drive-upload}" type_value_update="${TYPE_VALUE:-latest}" cmd_update="${COMMAND_NAME:-gupload}" path_update="${INSTALL_PATH:-$HOME/.gdrive-downloader/bin}" +{ [ "${TYPE:-}" != branch ]&&type_value_update="$(_get_latest_sha release "$type_value_update" "$repo_update")";}||: +if script_update="$(curl --compressed -Ls "https://github.com/$repo_update/raw/$type_value_update/install.sh")";then +_clear_line 1 +printf "%s\n" "$script_update"|sh -n||{ +printf "%s\n" "Install script downloaded but malformed, try again and if the issue persists open an issue on github." +return 1 +} +printf "%s\n" "$script_update"|sh -s -- ${job_uninstall:-} --skip-internet-check --cmd "$cmd_update" --path "$path_update" +current_time="$(date +'%s')" +[ -z "$job_uninstall" ]&&_update_value LAST_UPDATE_TIME "$current_time" +else +_clear_line 1 +"${QUIET:-_print_center}" "justify" "Error: Cannot download" " $job_update script." "=" 1>&2 +return 1 +fi +return 0 +} +_update_value(){ +command_path="${INSTALL_PATH:?}/${COMMAND_NAME:?}" +value_name="${1:?}" value="${2:-}" +script_without_value_and_shebang="$(grep -v "$value_name=\".*\".* # added values" -- "$command_path"|sed 1d)" +new_script="$(sed -n 1p -- "$command_path" +printf "%s\n" "$value_name=\"$value\" # added values" +printf "%s\n" "$script_without_value_and_shebang")" +printf "%s\n" "$new_script"|"${INSTALLATION:-bash}" -n||{ +printf "%s\n" "Update downloaded but malformed, try again and if the issue persists open an issue on github." +return 1 +} +chmod u+w -- "$command_path"&&printf "%s\n" "$new_script" >|"$command_path"&&chmod "a-w-r-x,${PERM_MODE:-u}+r+x" -- "$command_path" +return 0 +} +_is_fd_open(){ +for fd in ${1:?};do +if ! 
{ true >&"$fd";} 2<>/dev/null;then +printf "%s\n" "Error: fd $fd not open." +return 1 +fi +done +} +_parser_add_help(){ +_PARSER_ALL_HELP="$_PARSER_ALL_HELP +${__PARSER_BAR:-} +${1:-}" 2>|/dev/null +} +_parser_check_arguments(){ +nargs_parser_check_arguments="$((${1:?_parser_check_arguments}))" +num_parser_check_arguments=$(($#-2)) +[ "$num_parser_check_arguments" -lt "$nargs_parser_check_arguments" ]&&{ +printf "%s\n" "${0##*/}: $2: flag requires $nargs_parser_check_arguments argument." +printf "\n%s\n" "Help:" +printf "%s\n" "$(_usage "$2")" +exit 1 +} +return 0 +} +_flag_exists(){ +tmp_flag_exists="" option_flag_exists="" +_flag_help "${1:?}" tmp_flag_exists option_flag_exists +[ -z "$tmp_flag_exists" ]&&return 1 +_set_value d "${2:?}" "$option_flag_exists" +} +_flag_help(){ +flag_flag_help="" +_trim "-" "${1:?_flag_help}" flag_flag_help +_set_value i "${2:?_flag_help}" "_parser__help_$flag_flag_help" +_set_value d "${3:-_}" "$flag_flag_help" +} +_parse_arguments(){ +__NEWLINE=" +" +_parse_support_ansi_escapes(){ +case "$TERM" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*){ [ -t 2 ]&&return 0;}||return 1;; +*): +esac +{ [ -t 2 ]&&return 0;}||return 1 +} +_parser_required_column_size(){ +COLUMNS="$({ command -v bash 1>|/dev/null&&bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1';}||{ command -v zsh 1>|/dev/null&&zsh -c 'printf "%s\n" "${COLUMNS}"';}||{ command -v stty 1>|/dev/null&&_tmp="$(stty size)"&&printf "%s\n" "${_tmp##* }";}||{ command -v tput 1>|/dev/null&&tput cols;})"||: +[ "$((COLUMNS))" -gt 45 ]&&return 0 +} +_parse_support_ansi_escapes&&_parser_required_column_size&&__PARSER_BAR="$(\ +filler='' \ +symbol='_' +i=1&&while [ "$i" -le "$COLUMNS" ];do +filler="$filler$symbol"&&i="$((i+1))" +done +printf "%s\n" "$filler")" +__PARSER_BAR="${__PARSER_BAR:+$__PARSER_BAR$__NEWLINE}" +unset _PARSER_ALL_HELP _PARSER_ARGS_SHIFT _PARSER_PREPROCESS_FUNCTION +unset _PARSER_FLAGS _PARSER_CURRENT_FLAGS _PARSER_CURRENT_NARGS _PARSER_CURRENT_ARGS 
_PARSER_CURRENT_ARGS_TYPE +"${1:?_parse_arguments - 1: Missing funtion name to setup flags}"||return 1 +shift 2>|/dev/null +_parser_run_preprocess||return 1 +while [ "$#" -gt 0 ];do +case "$1" in +''):;; +--)shift +while [ "$#" -gt 0 ];do +_parser_process_input "$@"||return 1 +shift +done +;; +-*)\ +flag_parse_arguments="" +if _flag_exists "$1" flag_parse_arguments;then +"_parser_process_$flag_parse_arguments" "$@"||return 1 +else +printf "%s\n\n" "${0##*/}: $1: Unknown option" +_short_help +fi +;; +*)_parser_process_input "$@"||return 1 +esac +_PARSER_ARGS_SHIFT="$((_PARSER_ARGS_SHIFT+1))" +shift "$_PARSER_ARGS_SHIFT" +_PARSER_ARGS_SHIFT="0" +done +return 0 +} +_parser_setup_flag(){ +_PARSER_CURRENT_FLAGS="" tmp_parser_setup_flag="" +_PARSER_FLAGS="${1:?_parser_setup_flag}" +for f in $_PARSER_FLAGS;do +_trim "-" "$f" tmp_parser_setup_flag +_PARSER_CURRENT_FLAGS="$_PARSER_CURRENT_FLAGS $tmp_parser_setup_flag" +done +_PARSER_CURRENT_NARGS="${2:?_parser_setup_flag}" +_PARSER_CURRENT_ARGS_TYPE="$3" +_PARSER_CURRENT_ARGS="$4" +} +_parser_setup_flag_help(){ +flags_parser_setup_flag_help="${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_help}" +nargs_parser_setup_flag_help="${_PARSER_CURRENT_NARGS:?_parser_setup_flag_help}" +unset start_parser_setup_flag_help \ +help_parser_setup_flag_help \ +arg_parser_setup_flag_help \ +all_parser_setup_flag_help +while IFS= read -r line <&4;do +help_parser_setup_flag_help="$help_parser_setup_flag_help + $line" +done 4<|/dev/null +} +_parser_setup_flag_preprocess(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_preprocess +while IFS= read -r line <&4;do +fn_parser_setup_flag_preprocess="$fn_parser_setup_flag_preprocess +$line" +done +_PARSER_PREPROCESS_FUNCTION="$_PARSER_PREPROCESS_FUNCTION +$fn_parser_setup_flag_preprocess" +} +_parser_setup_flag_process(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_process +if [ "${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process}" -gt 0 ]&&! 
[ "$_PARSER_CURRENT_ARGS_TYPE" = optional ];then +fn_parser_setup_flag_process="_parser_check_arguments ${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process} \"\${@}\"" +fi +while IFS= read -r line <&4;do +fn_parser_setup_flag_process="$fn_parser_setup_flag_process +$line" +done +for f in ${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_process};do +eval "_parser_process_$f() { $fn_parser_setup_flag_process ; }" +done +} +_parser_run_preprocess(){ +eval "_parser_preprocess_setup() { ${_PARSER_PREPROCESS_FUNCTION:-:} ; }"&&_parser_preprocess_setup +} +_parser_shift(){ +export _PARSER_ARGS_SHIFT="${1:-1}" +} +_short_help(){ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)eval export "$2"=\"\$"$3"\";; +*)return 1 +esac +} +_trim(){ +char_trim="$1" str_trim="$2" var_trim="$3" +set -f +old_ifs="$IFS" +IFS="$char_trim" +set -- $str_trim +IFS= +if [ -n "$var_trim" ];then +_set_value d "$var_trim" "$*" +else +printf "%s" "$*" +fi +IFS="$old_ifs" +set +f +} +_parser_setup_flags(){ +_parser_add_help " +The script can be used to upload file/directory to google drive. + +Usage: +${0##*/} [options.. ] + +Foldername argument is optional. If not provided, the file will be uploaded to preconfigured google drive root folder. + +File name argument is optional if create directory option is used. + +Options:" +_parser_setup_flag "input" 0 +_parser_setup_flag_help \ +"Input files or drive ids to process." 
+_parser_setup_flag_preprocess 4<<'EOF' +unset TOTAL_ID_INPUTS TOTAL_FILE_INPUTS +EOF +_parser_setup_flag_process 4<<'EOF' +# set INPUT_FILE|ID_num to the input, where num is rank of input +case "${1}" in + *drive.google.com* | *docs.google.com*) _set_value d "INPUT_ID_$((TOTAL_ID_INPUTS += 1))" "$(_extract_id "${1}")" ;; + *) + [ -r "${1}" ] || { + { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid File - ${1} ]" "=" && printf "\n"; } 1>&2 + return + } + _set_value d "INPUT_FILE_$((TOTAL_FILE_INPUTS += 1))" "${1}" + ;; +esac +EOF +_parser_setup_flag "-a --account" 1 required "account name" +_parser_setup_flag_help \ +"Use a different account than the default one. + +To change the default account name, use this format, -a/--account default=account_name" +_parser_setup_flag_preprocess 4<<'EOF' +unset OAUTH_ENABLED ACCOUNT_NAME ACCOUNT_ONLY_RUN CUSTOM_ACCOUNT_NAME UPDATE_DEFAULT_ACCOUNT +EOF +_parser_setup_flag_process 4<<'EOF' +export OAUTH_ENABLED="true" CUSTOM_ACCOUNT_NAME="${2##default=}" +[ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ACCOUNT="_update_config" +_parser_shift +EOF +_parser_setup_flag "-la --list-accounts" 0 +_parser_setup_flag_help \ +"Print all configured accounts in the config files." +_parser_setup_flag_preprocess 4<<'EOF' +unset LIST_ACCOUNTS +EOF +_parser_setup_flag_process 4<<'EOF' +export LIST_ACCOUNTS="true" +EOF +_parser_setup_flag "-ca --create-account" 1 required "account name" +_parser_setup_flag_help \ +"To create a new account with the given name if does not already exists." +_parser_setup_flag_preprocess 4<<'EOF' +unset OAUTH_ENABLED NEW_ACCOUNT_NAME +EOF +_parser_setup_flag_process 4<<'EOF' +export OAUTH_ENABLED="true" +export NEW_ACCOUNT_NAME="${2}" && _parser_shift +EOF +_parser_setup_flag "-da --delete-account" 1 required "account name" +_parser_setup_flag_help \ +"To delete an account information from config file." 
+_parser_setup_flag_preprocess 4<<'EOF' +unset DELETE_ACCOUNT_NAME +EOF +_parser_setup_flag_process 4<<'EOF' +export DELETE_ACCOUNT_NAME="${2}" && _parser_shift +EOF +_parser_setup_flag "-c -C --create-dir" 1 required "foldername" +_parser_setup_flag_help \ +"Option to create directory. Will print folder id. Can be used to provide input folder, see README." +_parser_setup_flag_preprocess 4<<'EOF' +unset FOLDERNAME +EOF +_parser_setup_flag_process 4<<'EOF' +export FOLDERNAME="${2}" && _parser_shift +EOF +_parser_setup_flag "-r --root-dir" 1 required "google folder id or folder url containing id" +_parser_setup_flag_help \ +"Google folder ID/URL to which the file/directory is going to upload. +If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url" +_parser_setup_flag_preprocess 4<<'EOF' +unset ROOTDIR UPDATE_DEFAULT_ROOTDIR +EOF +_parser_setup_flag_process 4<<'EOF' +ROOTDIR="${2##default=}" +[ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ROOTDIR="_update_config" +_parser_shift +EOF +_parser_setup_flag "-s --skip-subdirs" 0 +_parser_setup_flag_help \ +"Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads." +_parser_setup_flag_preprocess 4<<'EOF' +unset SKIP_SUBDIRS +EOF +_parser_setup_flag_process 4<<'EOF' +export SKIP_SUBDIRS="true" +EOF +_parser_setup_flag "-p --parallel" 1 required "no of files to parallely upload" +_parser_setup_flag_help \ +"Upload multiple files in parallel, Max value = 10." 
+_parser_setup_flag_preprocess 4<<'EOF' +unset NO_OF_PARALLEL_JOBS PARALLEL_UPLOAD +EOF +_parser_setup_flag_process 4<<'EOF' +if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then + export NO_OF_PARALLEL_JOBS="${2}" +else + printf "\nError: -p/--parallel accepts values between 1 to 10.\n" + return 1 +fi +export PARALLEL_UPLOAD="parallel" && _parser_shift +EOF +_parser_setup_flag "-cl --clone" 1 required "gdrive id or link" +_parser_setup_flag_help \ +"Upload a gdrive file without downloading." +_parser_setup_flag_preprocess 4<<'EOF' +unset TOTAL_ID_INPUTS +EOF +_parser_setup_flag_process 4<<'EOF' +# set INPUT_FILE|ID_num to the input, where num is rank of input +case "${1}" in + *drive.google.com* | *docs.google.com*) _set_value d "INPUT_ID_$((TOTAL_ID_INPUTS += 1))" "$(_extract_id "${1}")" ;; +esac +_parser_shift +EOF +_parser_setup_flag "-o --overwrite" 0 +_parser_setup_flag_help \ +"Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders." +_parser_setup_flag_preprocess 4<<'EOF' +unset OVERWRITE UPLOAD_MODE +EOF +_parser_setup_flag_process 4<<'EOF' +export OVERWRITE="Overwrite" UPLOAD_MODE="update" +EOF +_parser_setup_flag "-d --skip-duplicates" 0 +_parser_setup_flag_help \ +"Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders." +_parser_setup_flag_preprocess 4<<'EOF' +unset SKIP_DUPLICATES UPLOAD_MODE +EOF +_parser_setup_flag_process 4<<'EOF' +export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update" +EOF +_parser_setup_flag "-cm --check-mode" 1 required "size or md5" +_parser_setup_flag_help \ +"Additional flag for --overwrite and --skip-duplicates flag. Can be used to change check mode in those flags, available args are 'size' and 'md5'." 
+_parser_setup_flag_preprocess 4<<'EOF' +unset CHECK_MODE +EOF +_parser_setup_flag_process 4<<'EOF' +case "${2}" in + size) export CHECK_MODE="2" && _parser_shift ;; + md5) export CHECK_MODE="3" && _parser_shift ;; + *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; +esac +EOF +_parser_setup_flag "-desc --description --description-all" 1 required "description of file" +_parser_setup_flag_help \ +"Specify description for the given file. To use the respective metadata of a file, below is the format: + +File name ( fullname ): %f | Size: %s | Mime Type: %m + +Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m' + +Note: For files inside folders, use --description-all flag." +_parser_setup_flag_preprocess 4<<'EOF' +unset DESCRIPTION DESCRIPTION_ALL +EOF +_parser_setup_flag_process 4<<'EOF' +[ "${1}" = "--description-all" ] && export DESCRIPTION_ALL="true" +export DESCRIPTION="${2}" && _parser_shift +EOF +_parser_setup_flag "-S --share" 1 required "email address" +_parser_setup_flag_help \ +"Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link." +_parser_setup_flag_preprocess 4<<'EOF' +unset SHARE EMAIL_REGEX SHARE_EMAIL +EOF +_parser_setup_flag_process 4<<'EOF' +SHARE="_share_id" +EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" +case "${2}" in + -* | '') : ;; + *) + if _assert_regex "${EMAIL_REGEX}" "${2}"; then + SHARE_EMAIL="${2}" && _parser_shift && export SHARE_EMAIL + fi + ;; +esac +SHARE_ROLE="${SHARE_ROLE:-reader}" +EOF +_parser_setup_flag "-SM -sm --share-mode" 1 required "share mode - r/w/c" +_parser_setup_flag_help \ +"Specify the share mode for sharing file. + + Share modes are: r / reader - Read only permission. + + : w / writer - Read and write permission. + + : c / commenter - Comment only permission. 
+
+Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is necessary."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset SHARE_ROLE SHARE
+EOF
+_parser_setup_flag_process 4<<'EOF'
+case "${2}" in
+    r | read*) SHARE_ROLE="reader" ;;
+    w | write*) SHARE_ROLE="writer" ;;
+    c | comment*) SHARE_ROLE="commenter" ;;
+    *)
+        printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." &&
+            exit 1
+        ;;
+esac
+SHARE="_share_id"
+_parser_shift
+EOF
+_parser_setup_flag "--speed" 1 required "speed"
+_parser_setup_flag_help \
+"Limit the upload speed, supported formats: 1K, 1M and 1G."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset CURL_SPEED
+EOF
+_parser_setup_flag_process 4<<'EOF'
+_tmp_regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$'
+if _assert_regex "${_tmp_regex}" "${2}"; then
+    export CURL_SPEED="--limit-rate ${2}" && _parser_shift
+else
+    printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2
+    exit 1
+fi
+EOF
+_parser_setup_flag "-i --save-info" 1 required "file where to save info"
+_parser_setup_flag_help \
+"Save uploaded files info to the given filename."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset LOG_FILE_ID
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export LOG_FILE_ID="${2}" && _parser_shift
+EOF
+_parser_setup_flag "-z --config" 1 required "config path"
+_parser_setup_flag_help \
+"Override default config file with custom config file.
+If you want to change default value, then use this format -z/--config default=your_config_file_path."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset UPDATE_DEFAULT_CONFIG
+_check_config() {
+    [ -z "${1##default=*}" ] && export UPDATE_DEFAULT_CONFIG="_update_config"
+    { [ -r "${2}" ] && CONFIG="${2}"; } || {
+        printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1
+    }
+    return 0
+}
+EOF
+_parser_setup_flag_process 4<<'EOF'
+_check_config "${2}" "${2/default=/}"
+_parser_shift
+EOF
+_parser_setup_flag "-q --quiet" 0
+_parser_setup_flag_help \
+"Suppress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset QUIET
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export QUIET="_print_center_quiet"
+EOF
+_parser_setup_flag "-R --retry" 1 required "num of retries"
+_parser_setup_flag_help \
+"Retry the file upload if it fails, positive integer as argument. Currently only for file uploads."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset RETRY
+EOF
+_parser_setup_flag_process 4<<'EOF'
+if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then
+    export RETRY="${2}" && _parser_shift
+else
+    printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n"
+    exit 1
+fi
+EOF
+_parser_setup_flag "-in --include" 1 required "pattern"
+_parser_setup_flag_help \
+"Only include the files with the given pattern to upload - Applicable for folder uploads.
+
+e.g: ${0##*/} local_folder --include '*1*', will only include with files with pattern '1' in the name."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset INCLUDE_FILES
+EOF
+_parser_setup_flag_process 4<<'EOF'
+INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && _parser_shift
+EOF
+_parser_setup_flag "-ex --exclude" 1 required "pattern"
+_parser_setup_flag_help \
+"Exclude the files with the given pattern from uploading. - Applicable for folder uploads.
+
+e.g: ${0##*/} local_folder --exclude '*1*', will exclude all the files pattern '1' in the name."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset EXCLUDE_FILES
+EOF
+_parser_setup_flag_process 4<<'EOF'
+EXCLUDE_FILES="${EXCLUDE_FILES} ! -name '${2}' " && _parser_shift
+EOF
+_parser_setup_flag "--hide" 0
+_parser_setup_flag_help \
+"This flag will prevent the script to print sensitive information like root folder id and drivelink."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset HIDE_INFO
+EOF
+_parser_setup_flag_process 4<<'EOF'
+HIDE_INFO=":"
+EOF
+_parser_setup_flag "-v --verbose" 0
+_parser_setup_flag_help \
+"Display detailed message (only for non-parallel uploads)."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset VERBOSE
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export VERBOSE="true"
+EOF
+_parser_setup_flag "-V --verbose-progress" 0
+_parser_setup_flag_help \
+"Display detailed message and detailed upload progress(only for non-parallel uploads)."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset VERBOSE_PROGRESS
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export VERBOSE_PROGRESS="true"
+EOF
+_parser_setup_flag "--skip-internet-check" 0
+_parser_setup_flag_help \
+"Do not check for internet connection, recommended to use in sync jobs."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset SKIP_INTERNET_CHECK
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export SKIP_INTERNET_CHECK=":"
+EOF
+_parser_setup_flag "--version --info" 0
+_parser_setup_flag_help \
+"Show detailed info, only if script is installed system wide."
+_parser_setup_flag_preprocess 4<<'EOF' +################################################### +# Print info if installed +################################################### +_version_info() { + export COMMAND_NAME REPO INSTALL_PATH TYPE TYPE_VALUE + if command -v "${COMMAND_NAME}" 1> /dev/null && [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then + for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do + value_version_info="" + _set_value i value_version_info "${i}" + printf "%s\n" "${i}=${value_version_info}" + done | sed -e "s/=/: /g" + else + printf "%s\n" "google-drive-upload is not installed system wide." + fi + exit 0 +} +EOF +_parser_setup_flag_process 4<<'EOF' +_version_info +EOF +_parser_setup_flag "-D --debug" 0 +_parser_setup_flag_help \ +"Display script command trace." +_parser_setup_flag_preprocess 4<<'EOF' +unset DEBUG +EOF +_parser_setup_flag_process 4<<'EOF' +export DEBUG="true" +EOF +_parser_setup_flag "-h --help" 1 optional "flag name" +_parser_setup_flag_help \ +"Print help for all flags and basic usage instructions. 
+ +To see help for a specific flag, --help flag_name ( with or without dashes ) + e.g: ${0##*/} --help aria" +_parser_setup_flag_preprocess 4<<'EOF' +################################################### +# 1st arg - can be flag name +# if 1st arg given, print specific flag help +# otherwise print full help +################################################### +_usage() { + [ -n "${1}" ] && { + help_usage_usage="" + _flag_help "${1}" help_usage_usage + + if [ -z "${help_usage_usage}" ]; then + printf "%s\n" "Error: No help found for ${1}" + else + printf "%s\n%s\n%s\n" "${__PARSER_BAR}" "${help_usage_usage}" "${__PARSER_BAR}" + fi + exit 0 + } + + printf "%s\n" "${_PARSER_ALL_HELP}" + exit 0 +} +EOF +_parser_setup_flag_process 4<<'EOF' +_usage "${2}" +EOF +[ "${GUPLOAD_INSTALLED_WITH:-}" = script ]&&{ +_parser_setup_flag "-u --update" 0 +_parser_setup_flag_help \ +"Update the installed script in your system." +_parser_setup_flag_process 4<<'EOF' +_check_debug && _update && { exit 0 || exit 1; } +EOF +_parser_setup_flag "--uninstall" 0 +_parser_setup_flag_help \ +"Uninstall script, remove related files." 
+_parser_setup_flag_process 4<<'EOF' +_check_debug && _update uninstall && { exit 0 || exit 1; } +EOF +} +return 0 +} +_account_name_valid(){ +name_account_name_valid="${1:?}" account_name_regex_account_name_valid='^([A-Za-z0-9_])+$' +_assert_regex "$account_name_regex_account_name_valid" "$name_account_name_valid"||return 1 +return 0 +} +_account_exists(){ +name_account_exists="${1:-}" client_id_account_exists="" client_secret_account_exists="" refresh_token_account_exists="" +_account_name_valid "$name_account_exists"||return 1 +_set_value indirect client_id_account_exists "ACCOUNT_${name_account_exists}_CLIENT_ID" +_set_value indirect client_secret_account_exists "ACCOUNT_${name_account_exists}_CLIENT_SECRET" +_set_value indirect refresh_token_account_exists "ACCOUNT_${name_account_exists}_REFRESH_TOKEN" +[ -z "${client_id_account_exists:+${client_secret_account_exists:+$refresh_token_account_exists}}" ]&&return 1 +return 0 +} +_all_accounts(){ +export CONFIG QUIET +{ _reload_config&&_handle_old_config;}||return 1 +COUNT=0 +while read -r account <&4&&[ -n "$account" ];do +_account_exists "$account"&&{ [ "$COUNT" = 0 ]&&"${QUIET:-_print_center}" "normal" " All available accounts. " "="||:;}&&printf "%b" "$((COUNT+=1)). $account \n"&&_set_value direct "ACC_${COUNT}_ACC" "$account" +done 4<|/dev/null +"${QUIET:-_print_center}" "normal" " New account name: " "=" +"${QUIET:-_print_center}" "normal" "Info: Account names can only contain alphabets / numbers / dashes." " "&&printf '\n' +} +until [ -n "$name_valid_set_new_account_name" ];do +if [ -n "$new_account_name_set_new_account_name" ];then +if _account_name_valid "$new_account_name_set_new_account_name";then +if _account_exists "$new_account_name_set_new_account_name";then +"${QUIET:-_print_center}" "normal" " Warning: Given account ( $new_account_name_set_new_account_name ) already exists, input different name. 
" "-" 1>&2 +unset new_account_name_set_new_account_name&&continue +else +export new_account_name_set_new_account_name="$new_account_name_set_new_account_name" NEW_ACCOUNT_NAME="$new_account_name_set_new_account_name"&&name_valid_set_new_account_name="true"&&continue +fi +else +"${QUIET:-_print_center}" "normal" " Warning: Given account name ( $new_account_name_set_new_account_name ) invalid, input different name. " "-" +unset new_account_name_set_new_account_name&&continue +fi +else +[ -t 1 ]||{ "${QUIET:-_print_center}" "normal" " Error: Not running in an interactive terminal, cannot ask for new account name. " 1>&2&&return 1;} +printf -- "-> \033[?7l" +read -r new_account_name_set_new_account_name +printf '\033[?7h' +fi +_clear_line 1 +done +"${QUIET:-_print_center}" "normal" " Given account name: $NEW_ACCOUNT_NAME " "=" +export ACCOUNT_NAME="$NEW_ACCOUNT_NAME" +return 0 +} +_delete_account(){ +export CONFIG QUIET +{ _reload_config&&_handle_old_config;}||return 1 +account_delete_account="${1:?Error: give account name}"&&unset regex_delete_account config_without_values_delete_account +if _account_exists "$account_delete_account";then +regex_delete_account="^ACCOUNT_${account_delete_account}_(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)|DEFAULT_ACCOUNT=\"$account_delete_account\"" +config_without_values_delete_account="$(grep -vE "$regex_delete_account" -- "$CONFIG")" +chmod u+w -- "$CONFIG"||return 1 +printf "%s\n" "$config_without_values_delete_account" >|"$CONFIG"||return 1 +chmod "a-w-r-x,u+r" -- "$CONFIG"||return 1 +"${QUIET:-_print_center}" "normal" " Successfully deleted account ( $account_delete_account ) from config. " "-" +else +"${QUIET:-_print_center}" "normal" " Error: Cannot delete account ( $account_delete_account ) from config. 
No such account exists " "-" 1>&2 +fi +return 0 +} +_handle_old_config(){ +export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ROOT_FOLDER ROOT_FOLDER_NAME +[ -n "${CLIENT_ID:+${CLIENT_SECRET:+$REFRESH_TOKEN}}" ]&&{ +account_name_handle_old_config="default" regex_check_handle_old_config config_without_values_handle_old_config count_handle_old_config +until ! _account_exists "$account_name_handle_old_config";do +account_name_handle_old_config="$account_name_handle_old_config$((count_handle_old_config+=1))" +done +regex_check_handle_old_config="^(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)" +config_without_values_handle_old_config="$(grep -vE "$regex_check_handle_old_config" -- "$CONFIG")" +chmod u+w -- "$CONFIG"||return 1 +printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ +"ACCOUNT_${account_name_handle_old_config}_CLIENT_ID=\"$CLIENT_ID\"" \ +"ACCOUNT_${account_name_handle_old_config}_CLIENT_SECRET=\"$CLIENT_SECRET\"" \ +"ACCOUNT_${account_name_handle_old_config}_REFRESH_TOKEN=\"$REFRESH_TOKEN\"" \ +"ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER=\"$ROOT_FOLDER\"" \ +"ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER_NAME=\"$ROOT_FOLDER_NAME\"" \ +"$config_without_values_handle_old_config" >|"$CONFIG"||return 1 +chmod "a-w-r-x,u+r" -- "$CONFIG"||return 1 +_reload_config||return 1 +} +return 0 +} +_check_credentials(){ +export CONFIG CONFIG_INFO DEFAULT_ACCOUNT NEW_ACCOUNT_NAME CUSTOM_ACCOUNT_NAME QUIET COUNT +{ _reload_config&&_handle_old_config;}||return 1 +ACCOUNT_NAME="$DEFAULT_ACCOUNT" +if [ -n "$NEW_ACCOUNT_NAME" ];then +_set_new_account_name "$NEW_ACCOUNT_NAME"||return 1 +_check_account_credentials "$ACCOUNT_NAME"||return 1 +else +if [ -n "$CUSTOM_ACCOUNT_NAME" ];then +if _account_exists "$CUSTOM_ACCOUNT_NAME";then +ACCOUNT_NAME="$CUSTOM_ACCOUNT_NAME" +else +"${QUIET:-_print_center}" "normal" " Error: No such account ( $CUSTOM_ACCOUNT_NAME ) exists. 
" "-"&&return 1
+fi
+elif [ -n "$DEFAULT_ACCOUNT" ];then
+_account_exists "$DEFAULT_ACCOUNT"||{
+_update_config DEFAULT_ACCOUNT "" "$CONFIG"&&unset DEFAULT_ACCOUNT ACCOUNT_NAME&&UPDATE_DEFAULT_ACCOUNT="_update_config"
+}
+else
+UPDATE_DEFAULT_ACCOUNT="_update_config"
+fi
+if [ -z "$ACCOUNT_NAME" ];then
+if _all_accounts 2>|/dev/null&&[ "$COUNT" -gt 0 ];then
+if [ "$COUNT" -eq 1 ];then
+_set_value indirect ACCOUNT_NAME "ACC_1_ACC"
+else
+"${QUIET:-_print_center}" "normal" " Above accounts are configured, but default one not set. " "="
+if [ -t 1 ];then
+"${QUIET:-_print_center}" "normal" " Choose default account: " "-"
+until [ -n "$ACCOUNT_NAME" ];do
+printf -- "-> \033[?7l"
+read -r account_name_check_credentials
+printf '\033[?7h'
+if [ "$account_name_check_credentials" -gt 0 ]&&[ "$account_name_check_credentials" -le "$COUNT" ];then
+_set_value indirect ACCOUNT_NAME "ACC_${account_name_check_credentials}_ACC"
+else
+_clear_line 1
+fi
+done
+else
+printf "%s\n" "Warning: Script is not running in a terminal, choosing first account as default." 
+_set_value indirect ACCOUNT_NAME "ACC_1_ACC" +fi +fi +else +_set_new_account_name ""||return 1 +_check_account_credentials "$ACCOUNT_NAME"||return 1 +fi +fi +_check_account_credentials "$ACCOUNT_NAME"||return 1 +fi +"${UPDATE_DEFAULT_ACCOUNT:-:}" DEFAULT_ACCOUNT "$ACCOUNT_NAME" "$CONFIG" +"${UPDATE_DEFAULT_CONFIG:-:}" CONFIG "$CONFIG" "$CONFIG_INFO" +[ -n "$CONTINUE_WITH_NO_INPUT" ]||_token_bg_service +return 0 +} +_check_account_credentials(){ +account_name_check_account_credentials="${1:?Give account name}" +{ +_check_client ID "$account_name_check_account_credentials"&&_check_client SECRET "$account_name_check_account_credentials"&&_check_refresh_token "$account_name_check_account_credentials"&&_check_access_token "$account_name_check_account_credentials" check +}||return 1 +return 0 +} +_check_client(){ +export CONFIG QUIET +type_check_client="CLIENT_${1:?Error: ID or SECRET}" account_name_check_client="${2:-}" +unset type_value_check_client type_name_check_client valid_check_client client_check_client message_check_client regex_check_client +if [ "$type_check_client" = "CLIENT_ID" ];then +regex_check_client='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' +else +regex_check_client='[0-9A-Za-z_-]+' +fi +type_name_check_client="${account_name_check_client:+ACCOUNT_${account_name_check_client}_}$type_check_client" +_set_value indirect type_value_check_client "$type_name_check_client" +until [ -n "$type_value_check_client" ]&&[ -n "$valid_check_client" ];do +[ -n "$type_value_check_client" ]&&{ +if _assert_regex "$regex_check_client" "$type_value_check_client";then +[ -n "$client_check_client" ]&&{ _update_config "$type_name_check_client" "$type_value_check_client" "$CONFIG"||return 1;} +valid_check_client="true"&&continue +else +{ [ -n "$client_check_client" ]&&message_check_client="- Try again";}||message_check_client="in config ( $CONFIG )" +"${QUIET:-_print_center}" "normal" " Invalid Client $1 $message_check_client " "-"&&unset 
"$type_name_check_client" client +fi +} +[ -z "$client_check_client" ]&&printf "\n"&&"${QUIET:-_print_center}" "normal" " Enter Client $1 " "-" +[ -n "$client_check_client" ]&&_clear_line 1 +printf -- "-> " +read -r "${type_name_check_client?}"&&client_check_client=1 +_set_value indirect type_value_check_client "$type_name_check_client" +done +_set_value direct "$type_name_check_client" "$type_value_check_client" +_set_value direct "$type_check_client" "$type_value_check_client" +return 0 +} +_check_refresh_token(){ +export CLIENT_ID CLIENT_SECRET QUIET CONFIG CURL_PROGRESS SCOPE REDIRECT_URI TOKEN_URL +[ -z "${CLIENT_ID:+$CLIENT_SECRET}" ]&&return 1 +account_name_check_refresh_token="${1:-}" +refresh_token_regex='[0-9]//[0-9A-Za-z_-]+' authorization_code_regex='[0-9]/[0-9A-Za-z_-]+' +_set_value direct refresh_token_name_check_refresh_token "${account_name_check_refresh_token:+ACCOUNT_${account_name_check_refresh_token}_}REFRESH_TOKEN" +_set_value indirect refresh_token_value_check_refresh_token "${refresh_token_name_check_refresh_token:-}" +[ "${REFETCH_REFRESH_TOKEN:-false}" = "true" ]&&{ +unset refresh_token_value_check_refresh_token +} +[ -n "$refresh_token_value_check_refresh_token" ]&&{ +! _assert_regex "$refresh_token_regex" "$refresh_token_value_check_refresh_token"&&"${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. " "-"&&unset refresh_token_value_check_refresh_token +} +[ -z "$refresh_token_value_check_refresh_token" ]&&{ +printf "\n"&&"${QUIET:-_print_center}" "normal" "If you have a refresh token generated, then type the token, else leave blank and press return key.." " " +printf "\n"&&"${QUIET:-_print_center}" "normal" " Refresh Token " "-"&&printf -- "-> " +read -r refresh_token_value_check_refresh_token +if [ -n "$refresh_token_value_check_refresh_token" ];then +"${QUIET:-_print_center}" "normal" " Checking refresh token.. 
" "-" +if _assert_regex "$refresh_token_regex" "$refresh_token_value_check_refresh_token";then +_set_value direct REFRESH_TOKEN "$refresh_token_value_check_refresh_token" +{ _check_access_token "$account_name_check_refresh_token" skip_check&&_update_config "$refresh_token_name_check_refresh_token" "$refresh_token_value_check_refresh_token" "$CONFIG"&&_clear_line 1;}||check_error_check_refresh_token=true +else +check_error_check_refresh_token=true +fi +[ -n "$check_error_check_refresh_token" ]&&"${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token given, follow below steps to generate.. " "-"&&unset refresh_token_value_check_refresh_token +else +"${QUIET:-_print_center}" "normal" " No Refresh token given, follow below steps to generate.. " "-"&&unset refresh_token_value_check_refresh_token +fi +[ -z "$refresh_token_value_check_refresh_token" ]&&{ +printf "\n"&&"${QUIET:-_print_center}" "normal" "Visit the below URL, tap on allow and then enter the code obtained" " " +URL="https://accounts.google.com/o/oauth2/auth?client_id=$CLIENT_ID&redirect_uri=$REDIRECT_URI&scope=$SCOPE&response_type=code&prompt=consent" +printf "\n%s\n" "$URL" +unset AUTHORIZATION_CODE authorization_code AUTHORIZATION_CODE_VALID response +until [ -n "$AUTHORIZATION_CODE" ]&&[ -n "$AUTHORIZATION_CODE_VALID" ];do +[ -n "$AUTHORIZATION_CODE" ]&&{ +if _assert_regex "$authorization_code_regex" "$AUTHORIZATION_CODE";then +AUTHORIZATION_CODE_VALID="true"&&continue +else +"${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. 
" "-"&&unset AUTHORIZATION_CODE authorization_code +fi +} +{ [ -z "$authorization_code" ]&&printf "\n"&&"${QUIET:-_print_center}" "normal" " Enter the authorization code " "-";}||_clear_line 1 +printf -- "-> \033[?7l" +read -r AUTHORIZATION_CODE&&authorization_code=1 +printf '\033[?7h' +done +response_check_refresh_token="$(_curl --compressed "$CURL_PROGRESS" -X POST \ +--data "code=$AUTHORIZATION_CODE&client_id=$CLIENT_ID&client_secret=$CLIENT_SECRET&redirect_uri=$REDIRECT_URI&grant_type=authorization_code" "$TOKEN_URL")"||: +_clear_line 1 1>&2 +refresh_token_value_check_refresh_token="$(printf "%s\n" "$response_check_refresh_token"|_json_value refresh_token 1 1)"||{ printf "%s\n" "Error: Cannot fetch refresh token, make sure the authorization code was correct."&&return 1;} +_set_value direct REFRESH_TOKEN "$refresh_token_value_check_refresh_token" +{ _check_access_token "$account_name_check_refresh_token" skip_check "$response_check_refresh_token"&&_update_config "$refresh_token_name_check_refresh_token" "$refresh_token_value_check_refresh_token" "$CONFIG";}||return 1 +} +printf "\n" +} +_set_value direct "$refresh_token_name_check_refresh_token" "$refresh_token_value_check_refresh_token" +_set_value direct REFRESH_TOKEN "$refresh_token_value_check_refresh_token" +return 0 +} +_check_access_token(){ +export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN CONFIG QUIET +[ -z "${CLIENT_ID:+${CLIENT_SECRET:+$REFRESH_TOKEN}}" ]&&return 1 +account_name_check_access_token="${1:-}" no_check_check_access_token="${2:-false}" response_json_check_access_token="${3:-}" +unset token_name_check_access_token token_expiry_name_check_access_token token_value_check_access_token token_expiry_value_check_access_token response_check_access_token +access_token_regex='ya29\.[0-9A-Za-z_-]+' +token_name_check_access_token="${account_name_check_access_token:+ACCOUNT_${account_name_check_access_token}_}ACCESS_TOKEN" +token_expiry_name_check_access_token="${token_name_check_access_token}_EXPIRY" 
+_set_value indirect token_value_check_access_token "$token_name_check_access_token" +_set_value indirect token_expiry_value_check_access_token "$token_expiry_name_check_access_token" +[ "$no_check_check_access_token" = skip_check ]||[ -z "$token_value_check_access_token" ]||[ "${token_expiry_value_check_access_token:-0}" -lt "$(_epoch)" ]||! _assert_regex "$access_token_regex" "$token_value_check_access_token"&&{ +response_check_access_token="${response_json_check_access_token:-$(curl --compressed -s -X POST --data \ +"client_id=$CLIENT_ID&client_secret=$CLIENT_SECRET&refresh_token=$REFRESH_TOKEN&grant_type=refresh_token" "$TOKEN_URL")}"||: +if token_value_check_access_token="$(printf "%s\n" "$response_check_access_token"|_json_value access_token 1 1)";then +token_expiry_value_check_access_token="$(($(_epoch)+$(printf "%s\n" "$response_check_access_token"|_json_value expires_in 1 1)-1))" +_update_config "$token_name_check_access_token" "$token_value_check_access_token" "$CONFIG"||return 1 +_update_config "$token_expiry_name_check_access_token" "$token_expiry_value_check_access_token" "$CONFIG"||return 1 +else +"${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2 +printf "%s\n" "$response_check_access_token" 1>&2 +printf "%s\n" "If refresh token has expired, then use --oauth-refetch-refresh-token to refetch refresh token, if the error is not clear make a issue on github repository." 
+return 1 +fi +} +_set_value direct ACCESS_TOKEN "$token_value_check_access_token" +_set_value direct ACCESS_TOKEN_EXPIRY "$token_expiry_value_check_access_token" +_set_value direct INITIAL_ACCESS_TOKEN "$ACCESS_TOKEN" +return 0 +} +_reload_config(){ +export CONFIG +{ [ -r "$CONFIG" ]&&_parse_config "$CONFIG";}||{ printf "" >>"$CONFIG"||return 1;} +return 0 +} +_token_bg_service(){ +export MAIN_PID ACCESS_TOKEN ACCESS_TOKEN_EXPIRY TMPFILE +[ -z "$MAIN_PID" ]&&return 0 +printf "%b\n" "ACCESS_TOKEN=\"$ACCESS_TOKEN\"\nACCESS_TOKEN_EXPIRY=\"$ACCESS_TOKEN_EXPIRY\"" >|"${TMPFILE}_ACCESS_TOKEN" +{ +until ! kill -0 "$MAIN_PID" 2>|/dev/null 1>&2;do +. "${TMPFILE}_ACCESS_TOKEN" +CURRENT_TIME="$(_epoch)" +REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY-CURRENT_TIME))" +if [ "$REMAINING_TOKEN_TIME" -le 300 ];then +CONFIG="${TMPFILE}_ACCESS_TOKEN" _timeout 30 _check_access_token "" skip_check||: +else +TOKEN_PROCESS_TIME_TO_SLEEP="$(if [ "$REMAINING_TOKEN_TIME" -le 301 ];then +printf "0\n" +else +printf "%s\n" "$((REMAINING_TOKEN_TIME-300))" +fi)" +sleep "$TOKEN_PROCESS_TIME_TO_SLEEP" +fi +sleep 1 +done +}& +export ACCESS_TOKEN_SERVICE_PID="$!" 
+return 0 +} +_bytes_to_human(){ +b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0 +d_bytes_to_human='' type_bytes_to_human='' +while [ "$b_bytes_to_human" -gt 1024 ];do +d_bytes_to_human="$(printf ".%02d" $((b_bytes_to_human%1024*100/1024)))" +b_bytes_to_human=$((b_bytes_to_human/1024))&&s_bytes_to_human=$((s_bytes_to_human+=1)) +done +j=0&&for i in B KB MB GB TB PB EB YB ZB;do +j="$((j+=1))"&&[ "$((j-1))" = "$s_bytes_to_human" ]&&type_bytes_to_human="$i"&&break +continue +done +printf "%s\n" "$b_bytes_to_human$d_bytes_to_human $type_bytes_to_human" +} +_check_debug(){ +export DEBUG QUIET +if [ -n "$DEBUG" ];then +set -x&&PS4='-> ' +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";};} +_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +else +if [ -z "$QUIET" ];then +if _support_ansi_escapes;then +if ! _required_column_size;then +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +fi +export EXTRA_LOG="_print_center" CURL_PROGRESS="-#" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" +else +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +_clear_line(){ :;}&&_move_cursor(){ :;} +fi +_newline(){ printf "%b" "$1";} +else +_print_center(){ :;}&&_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +fi +set +x +fi +} +_check_internet(){ +"${EXTRA_LOG:-}" "justify" "Checking Internet Connection.." "-" +if ! _timeout 10 curl -Is google.com --compressed;then +_clear_line 1 +"${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" +return 1 +fi +_clear_line 1 +} +_clear_line(){ +printf "\033[%sA\033[2K" "$1" +} +_dirname(){ +dir_dirname="${1:-.}" +dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}"&&[ -n "${dir_dirname##*/*}" ]&&dir_dirname=. 
+dir_dirname="${dir_dirname%/*}"&&dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" +printf '%s\n' "${dir_dirname:-/}" +} +_display_time(){ +t_display_time="$1" day_display_time="$((t_display_time/60/60/24))" +hr_display_time="$((t_display_time/60/60%24))" min_display_time="$((t_display_time/60%60))" sec_display_time="$((t_display_time%60))" +[ "$day_display_time" -gt 0 ]&&printf '%d days ' "$day_display_time" +[ "$hr_display_time" -gt 0 ]&&printf '%d hrs ' "$hr_display_time" +[ "$min_display_time" -gt 0 ]&&printf '%d minute(s) ' "$min_display_time" +[ "$day_display_time" -gt 0 ]||[ "$hr_display_time" -gt 0 ]||[ "$min_display_time" -gt 0 ]&&printf 'and ' +printf '%d seconds\n' "$sec_display_time" +} +_get_latest_sha(){ +export TYPE TYPE_VALUE REPO +unset latest_sha_get_latest_sha raw_get_latest_sha +case "${1:-$TYPE}" in +branch)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl --compressed -s https://github.com/"${3:-$REPO}"/commits/"${2:-$TYPE_VALUE}".atom -r 0-2000)" +_tmp="$(printf "%s\n" "$raw_get_latest_sha"|grep -o 'Commit\/.*<' -m1||:)"&&_tmp="${_tmp##*\/}"&&printf "%s\n" "${_tmp%%<*}")" +;; +release)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl -L --compressed -s https://github.com/"${3:-$REPO}"/releases/"${2:-$TYPE_VALUE}")" +_tmp="$(printf "%s\n" "$raw_get_latest_sha"|grep '="/'"${3:-$REPO}""/commit" -m1||:)"&&_tmp="${_tmp##*commit\/}"&&printf "%s\n" "${_tmp%%\"*}")" +;; +*): +esac +printf "%b" "${latest_sha_get_latest_sha:+$latest_sha_get_latest_sha\n}" +} +_json_escape(){ +mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" +if [ "$mode_json_escape" = "j" ];then +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s|\\\|\\\\\\\|g" \ +-e "s|\/|\\\/|g" \ +-e 's/\"/\\\"/g' \ +-e "s/$(printf '\t')/\\t/g" \ +-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +else +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s/$(printf '\t')/\\t/g" \ 
+-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +fi +output_json_escape="$(printf "%s" "$output_json_escape"|awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" +printf "%s" "$output_json_escape" +} +_json_value(){ +{ [ "$2" -gt 0 ] 2>|/dev/null&&no_of_lines_json_value="$2";}||: +{ [ "$3" -gt 0 ] 2>|/dev/null&&num_json_value="$3";}||{ ! [ "$3" = all ]&&num_json_value=1;} +_tmp="$(grep -o "\"$1\"\:.*" ${no_of_lines_json_value:+-m} $no_of_lines_json_value)"||return 1 +printf "%s\n" "$_tmp"|sed -e 's|.*"'"$1""\":||" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "$num_json_value"p||: +return 0 +} +_parse_config(){ +_config_file_parse_config="${1:?Error: Profile config file}" +print_parse_config="${2:-false}" +[ -r "$_config_file_parse_config" ]||{ +printf "%s\n" "Error: Given config file ( $_config_file_parse_config ) is not readable." +return 1 +} +while IFS='=' read -r key val;do +{ [ -n "$key" ]&&[ -n "$val" ]&&[ -n "${key##\#*}" ];}||continue +key="${key#"${key%%[![:space:]]*}"}" +val="${val#"${val%%[![:space:]]*}"}" +key="${key%"${key##*[![:space:]]}"}" +val="${val%"${val##*[![:space:]]}"}" +case "$val" in +\"*\")val="${val#\"}" val="${val%\"}";; +\'*\')val="${val#\'}" val="${val%\'}";; +*): +esac +export "$key=$val" 2>/dev/null||printf "%s\n" "Warning: $key is not a valid variable name." 
+[ "$print_parse_config" = true ]&&echo "$key=$val" +done <"$_config_file_parse_config" +return 0 +} +_print_center(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +term_cols_print_center="${COLUMNS:-}" +type_print_center="$1" filler_print_center="" +case "$type_print_center" in +normal)out_print_center="$2"&&symbol_print_center="$3";; +justify)if +[ $# = 3 ] +then +input1_print_center="$2" symbol_print_center="$3" to_print_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center-5))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&out_print_center="[ $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..]";}||{ out_print_center="[ $input1_print_center ]";} +else +input1_print_center="$2" input2_print_center="$3" symbol_print_center="$4" to_print_print_center="" temp_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center*47/100))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&temp_print_center=" $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..";}||{ temp_print_center=" $input1_print_center";} +to_print_print_center="$((term_cols_print_center*46/100))" +{ [ "${#input2_print_center}" -gt "$to_print_print_center" ]&&temp_print_center="$temp_print_center$(printf "%.${to_print_print_center}s\n" "$input2_print_center").. 
";}||{ temp_print_center="$temp_print_center$input2_print_center ";} +out_print_center="[$temp_print_center]" +fi +;; +*)return 1 +esac +str_len_print_center="${#out_print_center}" +[ "$str_len_print_center" -ge "$((term_cols_print_center-1))" ]&&{ +printf "%s\n" "$out_print_center"&&return 0 +} +filler_print_center_len="$(((term_cols_print_center-str_len_print_center)/2))" +i_print_center=1&&while [ "$i_print_center" -le "$filler_print_center_len" ];do +filler_print_center="$filler_print_center$symbol_print_center"&&i_print_center="$((i_print_center+1))" +done +printf "%s%s%s" "$filler_print_center" "$out_print_center" "$filler_print_center" +[ "$(((term_cols_print_center-str_len_print_center)%2))" -ne 0 ]&&printf "%s" "$symbol_print_center" +printf "\n" +return 0 +} +_print_center_quiet(){ +{ [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";} +} +_support_ansi_escapes(){ +unset ansi_escapes +case "${TERM:-}" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*)ansi_escapes="true";; +*): +esac +{ [ -t 2 ]&&[ -n "$ansi_escapes" ]&&return 0;}||return 1 +} +_timeout(){ +timeout_timeout="${1:?Error: Specify Timeout}"&&shift +{ +"$@"& +child="$!" +trap -- "" TERM +{ +sleep "$timeout_timeout" +kill -9 "$child" +}& +wait "$child" +} 2>|/dev/null 1>&2 +} +_update_config(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +value_name_update_config="$1" value_update_config="$2" config_path_update_config="$3" +! 
[ -f "$config_path_update_config" ]&&: >|"$config_path_update_config" +chmod u+w -- "$config_path_update_config"||return 1 +printf "%s\n%s\n" "$(grep -v -e "^$" -e "^$value_name_update_config=" -- "$config_path_update_config"||:)" \ +"$value_name_update_config=\"$value_update_config\"" >|"$config_path_update_config"||return 1 +chmod a-w-r-x,u+r -- "$config_path_update_config"||return 1 +return 0 +} +_check_existing_file()(export \ +EXTRA_LOG \ +CURL_PROGRESS_EXTRA \ +API_URL \ +API_VERSION +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +name_check_existing_file="$1" rootdir_check_existing_file="$2" mode_check_existing_file="$3" param_value_check_existing_file="$4" +unset query_check_existing_file response_check_existing_file id_check_existing_file +"$EXTRA_LOG" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2 +query_check_existing_file="$(_url_encode "name=\"$name_check_existing_file\" and '$rootdir_check_existing_file' in parents and trashed=false and 'me' in writers")" +response_check_existing_file="$(_api_request "$CURL_PROGRESS_EXTRA" \ +"$API_URL/drive/$API_VERSION/files?q=$query_check_existing_file&fields=files(id,name,mimeType${mode_check_existing_file:+,$mode_check_existing_file})&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +printf "%s\n" "$response_check_existing_file"|_json_value id 1 1 2>|/dev/null 1>&2||return 1 +[ -n "$mode_check_existing_file" ]&&{ +[ "$(printf "%s\n" "$response_check_existing_file"|_json_value "$mode_check_existing_file" 1 1)" = "$param_value_check_existing_file" ]||return 1 +} +printf "%s\n" "$response_check_existing_file" +return 0) +_clone_file(){ +export DESCRIPTION_FILE CHECK_MODE SKIP_DUPLICATES QUIET API_URL API_VERSION CURL_PROGRESS +[ $# -lt 5 ]&&printf "Missing arguments\n"&&return 1 +job_clone_file="$1" file_id_clone_file="$2" file_root_id_clone_file="$3" name_clone_file="$4" size_clone_file="$5" md5_clone_file="$6" +unset post_data_clone_file 
response_clone_file readable_size_clone_file description_clone_file&&STRING="Cloned"
+readable_size_clone_file="$(_bytes_to_human "$size_clone_file")"
+escaped_name_clone_file="$(_json_escape j "$name_clone_file")" print_name_clone_file="$(_json_escape p "$name_clone_file")"
+[ -n "$DESCRIPTION_FILE" ]&&{
+description_clone_file="$(printf "%s\n" "$DESCRIPTION_FILE"|sed -e "s|%f|$name_clone_file|g" -e "s|%s|$readable_size_clone_file|g")"
+description_clone_file="$(_json_escape j "$description_clone_file")"
+}
+post_data_clone_file="{\"parents\": [\"$file_root_id_clone_file\"]${description_clone_file:+,\"description\":\"$description_clone_file\"}}"
+_print_center "justify" "$print_name_clone_file " "| $readable_size_clone_file" "="
+if [ "$job_clone_file" = update ];then
+unset file_check_json_clone_file check_value_type_clone_file check_value_clone_file
+case "$CHECK_MODE" in
+2)check_value_type_clone_file="size" check_value_clone_file="$size_clone_file";;
+3)check_value_type_clone_file="md5Checksum" check_value_clone_file="$md5_clone_file";;
+*):
+esac
+if file_check_json_clone_file="$(_check_existing_file "$escaped_name_clone_file" "$file_root_id_clone_file" "$check_value_type_clone_file" "$check_value_clone_file")";then
+if [ -n "$SKIP_DUPLICATES" ];then
+_collect_file_info "$file_check_json_clone_file" "$print_name_clone_file"||return 1
+_clear_line 1
+"${QUIET:-_print_center}" "justify" "$print_name_clone_file" " already exists." "="&&return 0
+else
+_print_center "justify" "Overwriting file.." 
"-" +{ _file_id_clone_file="$(printf "%s\n" "$file_check_json_clone_file"|_json_value id 1 1)"&&post_data_clone_file="$(_drive_info "$_file_id_clone_file" "parents,writersCanShare")";}||{ _error_logging_upload "$print_name_clone_file" "${post_data_clone_file:-$file_check_json_clone_file}"||return 1;} +if [ "$_file_id_clone_file" != "$file_id_clone_file" ];then +_api_request -s \ +-X DELETE \ +"$API_URL/drive/$API_VERSION/files/$_file_id_clone_file?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>|/dev/null 1>&2||: +STRING="Updated" +else +_collect_file_info "$file_check_json_clone_file" "$print_name_clone_file"||return 1 +fi +fi +else +_print_center "justify" "Cloning file.." "-" +fi +else +_print_center "justify" "Cloning file.." "-" +fi +response_clone_file="$(_api_request $CURL_PROGRESS \ +-X POST \ +-H "Content-Type: application/json; charset=UTF-8" \ +-d "$post_data_clone_file" \ +"$API_URL/drive/$API_VERSION/files/$file_id_clone_file/copy?supportsAllDrives=true&includeItemsFromAllDrives=true"||:)" +for _ in 1 2 3;do _clear_line 1;done +_collect_file_info "$response_clone_file" "$print_name_clone_file"||return 1 +"${QUIET:-_print_center}" "justify" "$print_name_clone_file " "| $readable_size_clone_file | $STRING" "=" +return 0 +} +_create_directory(){ +export EXTRA_LOG CURL_PROGRESS_EXTRA API_VERSION API_URL +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +dirname_create_directory="${1##*/}" rootdir_create_directory="$2" +unset query_create_directory search_response_create_directory folder_id_create_directory +escaped_dirname_create_directory="$(_json_escape j "$dirname_create_directory")" +print_dirname_create_directory="$(_json_escape p "$dirname_create_directory")" +"$EXTRA_LOG" "justify" "Creating GDRIVE DIR:" " $print_dirname_create_directory" "-" 1>&2 +query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"$escaped_dirname_create_directory\" and trashed=false and '$rootdir_create_directory' in 
parents")" +search_response_create_directory="$(_api_request "$CURL_PROGRESS_EXTRA" \ +"$API_URL/drive/$API_VERSION/files?q=$query_create_directory&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +if ! folder_id_create_directory="$(printf "%s\n" "$search_response_create_directory"|_json_value id 1 1)";then +unset create_folder_post_data_create_directory create_folder_response_create_directory +create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"$escaped_dirname_create_directory\",\"parents\": [\"$rootdir_create_directory\"]}" +create_folder_response_create_directory="$(_api_request "$CURL_PROGRESS_EXTRA" \ +-X POST \ +-H "Content-Type: application/json; charset=UTF-8" \ +-d "$create_folder_post_data_create_directory" \ +"$API_URL/drive/$API_VERSION/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +fi +_clear_line 1 1>&2 +{ folder_id_create_directory="${folder_id_create_directory:-$(printf "%s\n" "$create_folder_response_create_directory"|_json_value id 1 1)}"&&printf "%s\n" "$folder_id_create_directory";}||{ printf "%s\n" "$create_folder_response_create_directory" 1>&2&&return 1;} +return 0 +} +_drive_info(){ +export EXTRA_LOG CURL_PROGRESS_EXTRA API_URL API_VERSION +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +folder_id_drive_info="$1" fetch_drive_info="$2" +unset search_response_drive_info +"$EXTRA_LOG" "justify" "Fetching info.." 
"-" 1>&2 +search_response_drive_info="$(_api_request "$CURL_PROGRESS_EXTRA" \ +"$API_URL/drive/$API_VERSION/files/$folder_id_drive_info?fields=$fetch_drive_info&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +printf "%b" "${search_response_drive_info:+$search_response_drive_info\n}" +return 0 +} +_extract_id(){ +[ $# = 0 ]&&printf "Missing arguments\n"&&return 1 +LC_ALL=C id_extract_id="$1" +case "$id_extract_id" in +*'drive.google.com'*'id='*)_tmp="${id_extract_id##*id=}"&&_tmp="${_tmp%%\?*}"&&id_extract_id="${_tmp%%\&*}";; +*'drive.google.com'*'file/d/'*|'http'*'docs.google.com'*'/d/'*)_tmp="${id_extract_id##*\/d\/}"&&_tmp="${_tmp%%\/*}"&&_tmp="${_tmp%%\?*}"&&id_extract_id="${_tmp%%\&*}";; +*'drive.google.com'*'drive'*'folders'*)_tmp="${id_extract_id##*\/folders\/}"&&_tmp="${_tmp%%\?*}"&&id_extract_id="${_tmp%%\&*}";; +*): +esac +printf "%b" "${id_extract_id:+$id_extract_id\n}" +} +_upload_file(){ +export QUIET DESCRIPTION_FILE CHECK_MODE SKIP_DUPLICATES API_URL API_VERSION INFO_PATH +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +job_upload_file="$1" input_upload_file="$2" folder_id_upload_file="$3" +unset slug_upload_file inputname_upload_file extension_upload_file inputsize_upload_file readable_size_upload_file request_method_upload_file \ +url_upload_file postdata_upload_file uploadlink_upload_file upload_body_upload_file mime_type_upload_file description_upload_file \ +resume_args1_upload_file resume_args2_upload_file resume_args3_upload_file +slug_upload_file="${input_upload_file##*/}" +escaped_slug_upload_file="$(_json_escape j "$slug_upload_file")" print_slug_upload_file="$(_json_escape p "$slug_upload_file")" +inputname_upload_file="${slug_upload_file%.*}" +extension_upload_file="${slug_upload_file##*.}" +inputsize_upload_file="$(($(wc -c <"$input_upload_file")))"&&content_length_upload_file="$inputsize_upload_file" +readable_size_upload_file="$(_bytes_to_human "$inputsize_upload_file")" +[ 
"$inputname_upload_file" = "$extension_upload_file" ]&&{
+mime_type_upload_file="$(file --brief --mime-type "$input_upload_file"||mimetype --output-format %m "$input_upload_file")" 2>|/dev/null||{
+"${QUIET:-_print_center}" "justify" "Error: file or mimetype command not found." "="&&printf "\n"
+exit 1
+}
+}
+[ -n "$DESCRIPTION_FILE" ]&&{
+description_upload_file="$(printf "%s\n" "$DESCRIPTION_FILE"|sed -e "s|%f|$slug_upload_file|g" -e "s|%s|$readable_size_upload_file|g" -e "s|%m|$mime_type_upload_file|g")"
+description_upload_file="$(_json_escape j "$description_upload_file")"
+}
+_print_center "justify" "$print_slug_upload_file" " | $readable_size_upload_file" "="
+[ "$job_upload_file" = update ]&&{
+unset file_check_json_upload_file check_value_upload_file
+case "$CHECK_MODE" in
+2)check_value_type_upload_file="size" check_value_upload_file="$inputsize_upload_file";;
+3)\
+check_value_type_upload_file="md5Checksum"
+check_value_upload_file="$(md5sum "$input_upload_file")"||{
+"${QUIET:-_print_center}" "justify" "Error: cannot calculate md5sum of given file." 
"=" 1>&2 +return 1 +} +check_value_upload_file="${check_value_upload_file%% *}" +;; +*): +esac +if file_check_json_upload_file="$(_check_existing_file "$escaped_slug_upload_file" "$folder_id_upload_file" "$check_value_type_upload_file" "$check_value_upload_file")";then +if [ -n "$SKIP_DUPLICATES" ];then +_collect_file_info "$file_check_json_upload_file" "$print_slug_upload_file"||return 1 +STRING="Skipped" _normal_logging_upload +return 0 +else +request_method_upload_file="PATCH" +_file_id_upload_file="$(printf "%s\n" "$file_check_json_upload_file"|_json_value id 1 1)"||{ _error_logging_upload "$print_slug_upload_file" "$file_check_json_upload_file"||return 1;} +url_upload_file="$API_URL/upload/drive/$API_VERSION/files/$_file_id_upload_file?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" +postdata_upload_file="{\"mimeType\": \"$mime_type_upload_file\",\"name\": \"$escaped_slug_upload_file\",\"addParents\": [\"$folder_id_upload_file\"]${description_upload_file:+,\"description\":\"$description_upload_file\"}}" +STRING="Updated" +fi +else +job_upload_file="create" +fi +} +[ "$job_upload_file" = create ]&&{ +url_upload_file="$API_URL/upload/drive/$API_VERSION/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" +request_method_upload_file="POST" +postdata_upload_file="{\"mimeType\": \"$mime_type_upload_file\",\"name\": \"$escaped_slug_upload_file\",\"parents\": [\"$folder_id_upload_file\"]${description_upload_file:+,\"description\":\"$description_upload_file\"}}" +STRING="Uploaded" +} +__file_upload_file="$INFO_PATH/${print_slug_upload_file}__::__${folder_id_upload_file}__::__$inputsize_upload_file" +if [ -r "$__file_upload_file" ];then +uploadlink_upload_file="$(cat "$__file_upload_file"||:)" +http_code_upload_file="$(curl --compressed -s -X PUT "$uploadlink_upload_file" -o /dev/null --write-out %"{http_code}")"||: +case "$http_code_upload_file" in +308)\ +uploaded_range_upload_file="$(\ +raw_upload_file="$(curl 
--compressed -s -X PUT \ +-H "Content-Range: bytes */$content_length_upload_file" \ +--url "$uploadlink_upload_file" --globoff -D -||:)"&&printf "%s\n" "${raw_upload_file##*[R,r]ange: bytes=0-}"|while +read -r line +do printf "%s\n" "${line%%"$(printf '\r')"}"&&break;done)" +if [ "$uploaded_range_upload_file" -gt 0 ] 2>|/dev/null;then +_print_center "justify" "Resuming interrupted upload.." "-"&&_newline "\n" +content_range_upload_file="$(printf "bytes %s-%s/%s\n" "$((uploaded_range_upload_file+1))" "$((inputsize_upload_file-1))" "$inputsize_upload_file")" +content_length_upload_file="$((inputsize_upload_file-$((uploaded_range_upload_file+1))))" +resume_args1_upload_file='-s' resume_args2_upload_file='--http1.1' resume_args3_upload_file="Content-Range: $content_range_upload_file" +_upload_file_from_uri _clear_line +_collect_file_info "$upload_body_upload_file" "$print_slug_upload_file"||return 1 +_normal_logging_upload +_remove_upload_session +else +_full_upload||return 1 +fi +;; +4[0-9][0-9]|000)_full_upload||return 1 +;; +201|200)\ +upload_body_upload_file="$http_code_upload_file" +_collect_file_info "$upload_body_upload_file" "$print_slug_upload_file"||return 1 +_normal_logging_upload +_remove_upload_session +;; +*): +esac +else +_full_upload||return 1 +fi +return 0 +} +_generate_upload_link(){ +"${EXTRA_LOG:-}" "justify" "Generating upload link.." 
"-" 1>&2 +uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA:-}" \ +-X "$request_method_upload_file" \ +-H "Content-Type: application/json; charset=UTF-8" \ +-H "X-Upload-Content-Type: $mime_type_upload_file" \ +-H "X-Upload-Content-Length: $inputsize_upload_file" \ +-d "$postdata_upload_file" \ +"$url_upload_file" \ +-D -||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +case "$uploadlink_upload_file" in +*'ocation: '*'upload_id'*)uploadlink_upload_file="$(printf "%s\n" "${uploadlink_upload_file##*[L,l]ocation: }"|while read -r line;do printf "%s\n" "${line%%"$(printf '\r')"}"&&break;done)"&&return 0;; +*)return 1 +esac +return 0 +} +_upload_file_from_uri(){ +_print_center "justify" "Uploading.." "-" +upload_body_upload_file="$(_api_request ${CURL_PROGRESS:-} \ +-X PUT \ +-H "Content-Type: $mime_type_upload_file" \ +-H "Content-Length: $content_length_upload_file" \ +-H "Slug: $print_slug_upload_file" \ +-T "$input_upload_file" \ +-o- \ +--url "$uploadlink_upload_file" \ +--globoff \ +${CURL_SPEED:-} ${resume_args1_upload_file:-} ${resume_args2_upload_file:-} \ +-H "$resume_args3_upload_file"||:)" +[ -z "${VERBOSE_PROGRESS:-}" ]&&for _ in 1 2;do _clear_line 1;done&&"${1:-:}" +return 0 +} +_normal_logging_upload(){ +[ -z "${VERBOSE_PROGRESS:-}" ]&&_clear_line 1 +"${QUIET:-_print_center}" "justify" "$slug_upload_file " "| $readable_size_upload_file | ${STRING:-}" "=" +return 0 +} +_log_upload_session(){ +[ "$inputsize_upload_file" -gt 1000000 ]&&printf "%s\n" "$uploadlink_upload_file" >|"$__file_upload_file" +return 0 +} +_remove_upload_session(){ +rm -f "$__file_upload_file" +return 0 +} +_full_upload(){ +_generate_upload_link||{ _error_logging_upload "$print_slug_upload_file" "$uploadlink_upload_file"||return 1;} +_log_upload_session +_upload_file_from_uri +_collect_file_info "$upload_body_upload_file" "$print_slug_upload_file"||return 1 +_normal_logging_upload +_remove_upload_session +return 0 +} +_share_id(){ +[ $# -lt 2 ]&&printf "Missing 
arguments\n"&&return 1 +id_share_id="$1" role_share_id="${2:?Missing role}" share_email_share_id="$3" role_share_id="reader" type_share_id="${share_email_share_id:+user}" +unset post_data_share_id response_share_id +"$EXTRA_LOG" "justify" "Sharing.." "-" 1>&2 +post_data_share_id="{\"role\":\"$role_share_id\",\"type\":\"${type_share_id:-anyone}\"${share_email_share_id:+,\"emailAddress\":\"$share_email_share_id\"}}" +response_share_id="$(_api_request "$CURL_PROGRESS_EXTRA" \ +-X POST \ +-H "Content-Type: application/json; charset=UTF-8" \ +-d "$post_data_share_id" \ +"$API_URL/drive/$API_VERSION/files/$id_share_id/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +{ printf "%s\n" "$response_share_id"|_json_value id 1 1 2>|/dev/null 1>&2&&return 0;}||{ printf "%s\n" "Error: Cannot Share." 1>&2&&printf "%s\n" "$response_share_id" 1>&2&&return 1;} +} +_api_request(){ +. "${TMPFILE:-}_ACCESS_TOKEN" +curl --compressed \ +-H "Authorization: Bearer ${ACCESS_TOKEN:-}" \ +"$@" +} +_collect_file_info(){ +json_collect_file_info="$1" info_collect_file_info="" +FILE_ID="$(printf "%s\n" "$json_collect_file_info"|_json_value id 1 1)"||{ _error_logging_upload "$2" "$json_collect_file_info"||return 1;} +{ [ -z "$LOG_FILE_ID" ]||[ -d "$LOG_FILE_ID" ];}&&return 0 +info_collect_file_info="Link: https://drive.google.com/open?id=$FILE_ID +Name: $(printf "%s\n" "$json_collect_file_info"|_json_value name 1 1||:) +ID: $FILE_ID +Type: $(printf "%s\n" "$json_collect_file_info"|_json_value mimeType 1 1||:)" +printf "%s\n\n" "$info_collect_file_info" >>"$LOG_FILE_ID" +return 0 +} +_error_logging_upload(){ +log_error_logging_upload="$2" +"${QUIET:-_print_center}" "justify" "Upload ERROR" ", ${1:-} not ${STRING:-uploaded}." "=" 1>&2 +case "$log_error_logging_upload" in +*'"message": "User rate limit exceeded."'*)printf "%s\n\n%s\n" "$log_error_logging_upload" \ +"Today's upload limit reached for this account. 
Use another account to upload or wait for tomorrow." \ +1>&2 +export RETRY=0 +;; +''|*)printf "%s\n" "$log_error_logging_upload" 1>&2 +esac +printf "\n\n\n" 1>&2 +return 1 +} +_get_rootdir_id(){ +file_gen_final_list="${1:?Error: give filename}" +rootdir_gen_final_list="$(_dirname "$file_gen_final_list")" +temp_gen_final_list="$(printf "%s\n" "${DIRIDS:?Error: DIRIDS Missing}"|grep -F "|:_//_:|$rootdir_gen_final_list|:_//_:|"||:)" +printf "%s\n" "${temp_gen_final_list%%"|:_//_:|$rootdir_gen_final_list|:_//_:|"}" +return 0 +} +_upload_file_main(){ +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +file_upload_file_main="$2" sleep_upload_file_main=0 +{ [ "$1" = parse ]&&dirid_upload_file_main="$(_get_rootdir_id "$file_upload_file_main")";}||dirid_upload_file_main="$3" +retry_upload_file_main="${RETRY:-0}"&&unset RETURN_STATUS +until [ "$retry_upload_file_main" -le 0 ]&&[ -n "$RETURN_STATUS" ];do +if [ -n "$4" ];then +{ _upload_file "${UPLOAD_MODE:-create}" "$file_upload_file_main" "$dirid_upload_file_main" 2>|/dev/null 1>&2&&RETURN_STATUS=1&&break;}||RETURN_STATUS=2 +else +{ _upload_file "${UPLOAD_MODE:-create}" "$file_upload_file_main" "$dirid_upload_file_main"&&RETURN_STATUS=1&&break;}||RETURN_STATUS=2 +fi +[ "$((retry_upload_file_main-=1))" -lt 1 ]&&sleep "$((sleep_upload_file_main+=1))" +continue +done +[ -n "$4" ]&&{ +{ [ "$RETURN_STATUS" = 1 ]&&printf "%s\n" "$file_upload_file_main";}||printf "%s\n" "$file_upload_file_main" 1>&2 +} +return 0 +} +_upload_folder(){ +export VERBOSE VERBOSE_PROGRESS NO_OF_PARALLEL_JOBS TMPFILE NO_OF_FILES +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +mode_upload_folder="$1" PARSE_MODE="$2" files_upload_folder="$3" ID="${4:-}" +SUCCESS_STATUS=0 SUCCESS_FILES="" ERROR_STATUS=0 ERROR_FILES="" +case "$mode_upload_folder" in +normal)[ "$PARSE_MODE" = parse ]&&_clear_line 1&&_newline "\n" +while read -r file <&4;do +_upload_file_main "$PARSE_MODE" "$file" "$ID" +{ [ "$RETURN_STATUS" = 1 ]&&: 
"$((SUCCESS_STATUS+=1))"&&SUCCESS_FILES="$(printf "%b\n" "${SUCCESS_STATUS:+$SUCCESS_STATUS\n}$file")";}||{ : "$((ERROR_STATUS+=1))"&&ERROR_FILES="$(printf "%b\n" "${ERROR_STATUS:+$ERROR_STATUS\n}$file")";} +if [ -n "${VERBOSE:-$VERBOSE_PROGRESS}" ];then +_print_center "justify" "Status: $SUCCESS_STATUS Uploaded" " | $ERROR_STATUS Failed" "="&&_newline "\n" +else +for _ in 1 2;do _clear_line 1;done +_print_center "justify" "Status: $SUCCESS_STATUS Uploaded" " | $ERROR_STATUS Failed" "=" +fi +done 4<|"$TMPFILE"SUCCESS 2>|"$TMPFILE"ERROR)& +pid="$!" +until [ -f "$TMPFILE"SUCCESS ]||[ -f "$TMPFILE"ERORR ];do sleep 0.5;done +[ "$PARSE_MODE" = parse ]&&_clear_line 1 +_newline "\n" +until ! kill -0 "$pid" 2>|/dev/null 1>&2;do +SUCCESS_STATUS="$(($(wc -l <"$TMPFILE"SUCCESS)))" +ERROR_STATUS="$(($(wc -l <"$TMPFILE"ERROR)))" +sleep 1 +[ "$((SUCCESS_STATUS+ERROR_STATUS))" != "$TOTAL" ]&&_clear_line 1&&"${QUIET:-_print_center}" "justify" "Status" ": $SUCCESS_STATUS Uploaded | $ERROR_STATUS Failed" "=" +TOTAL="$((SUCCESS_STATUS+ERROR_STATUS))" +done +SUCCESS_STATUS="$(($(wc -l <"$TMPFILE"SUCCESS)))" SUCCESS_FILES="$(cat "$TMPFILE"SUCCESS)" +ERROR_STATUS="$(($(wc -l <"$TMPFILE"ERROR)))" ERROR_FILES="$(cat "$TMPFILE"ERROR)" +export SUCCESS_FILES ERROR_FILES +;; +*): +esac +return 0 +} +_cleanup_config(){ +config="${1:?Error: Missing config}"&&unset values_regex _tmp +! 
[ -f "$config" ]&&return 0 +while read -r line <&4&&[ -n "$line" ];do +expiry_value_name="${line%%=*}" +token_value_name="${expiry_value_name%%_EXPIRY}" +_tmp="${line##*=}"&&_tmp="${_tmp%\"}"&&expiry="${_tmp#\"}" +[ "$expiry" -le "$(_epoch)" ]&&values_regex="${values_regex:+$values_regex|}$expiry_value_name=\".*\"|$token_value_name=\".*\"" +done 4<|"$config"&&chmod "a-w-r-x,u+r" -- "$config" +return 0 +} +_setup_arguments(){ +[ $# = 0 ]&&printf "Missing arguments\n"&&return 1 +unset CONTINUE_WITH_NO_INPUT +export CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s" +INFO_PATH="$HOME/.google-drive-upload" CONFIG_INFO="$INFO_PATH/google-drive-upload.configpath" +[ -f "$CONFIG_INFO" ]&&. "$CONFIG_INFO" +CONFIG="${CONFIG:-$HOME/.googledrive.conf}" +unset ROOT_FOLDER CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ACCESS_TOKEN +export API_URL="https://www.googleapis.com" +export API_VERSION="v3" \ +SCOPE="$API_URL/auth/drive" \ +REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ +TOKEN_URL="https://accounts.google.com/o/oauth2/token" +_parse_arguments "_parser_setup_flags" "$@"||return 1 +_check_debug +[ -n "$VERBOSE_PROGRESS" ]&&unset VERBOSE&&export CURL_PROGRESS="" +[ -n "$QUIET" ]&&export CURL_PROGRESS="-s" +mkdir -p "$INFO_PATH"||return 1 +[ -n "$DELETE_ACCOUNT_NAME" ]&&_delete_account "$DELETE_ACCOUNT_NAME" +[ -n "$LIST_ACCOUNTS" ]&&_all_accounts +[ -z "${INPUT_FILE_1:-${INPUT_ID_1:-$FOLDERNAME}}" ]&&{ +[ -z "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-$NEW_ACCOUNT_NAME}}" ]&&_short_help +[ -n "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-}}" ]&&exit 0 +[ -n "$NEW_ACCOUNT_NAME" ]&&CONTINUE_WITH_NO_INPUT="true" +} +[ -z "$CHECK_MODE" ]&&{ +case "${SKIP_DUPLICATES:-$OVERWRITE}" in +"Overwrite")export CHECK_MODE="1";; +"Skip Existing")export CHECK_MODE="2";; +*): +esac +} +return 0 +} +_setup_traps(){ +export SUPPORT_ANSI_ESCAPES TMPFILE ACCESS_TOKEN ACCESS_TOKEN_EXPIRY INITIAL_ACCESS_TOKEN ACCOUNT_NAME CONFIG ACCESS_TOKEN_SERVICE_PID +_cleanup(){ +[ -n "$SUPPORT_ANSI_ESCAPES" ]&&printf 
"\033[?25h\033[?7h" +{ +[ -f "${TMPFILE}_ACCESS_TOKEN" ]&&{ +. "${TMPFILE}_ACCESS_TOKEN" +[ "$INITIAL_ACCESS_TOKEN" = "$ACCESS_TOKEN" ]||{ +_update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "$ACCESS_TOKEN" "$CONFIG" +_update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "$ACCESS_TOKEN_EXPIRY" "$CONFIG" +} +}||: 1>|/dev/null +[ -n "$ACCESS_TOKEN_SERVICE_PID" ]&&{ +token_service_pids="$(ps --ppid="$ACCESS_TOKEN_SERVICE_PID" -o pid=)" +kill "$ACCESS_TOKEN_SERVICE_PID" +}||: 1>|/dev/null +script_children_pids="$(ps --ppid="$MAIN_PID" -o pid=)" +kill $token_service_pids $script_children_pids 1>|/dev/null +rm -f "${TMPFILE:?}"* +export abnormal_exit&&if [ -n "$abnormal_exit" ];then +printf "\n\n%s\n" "Script exited manually." +kill "${_SCRIPT_KILL_SIGNAL:--9}" -$$& +else +{ _cleanup_config "$CONFIG"&&[ "${GUPLOAD_INSTALLED_WITH:-}" = script ]&&_auto_update;} 1>|/dev/null& +fi +} 2>|/dev/null||: +return 0 +} +trap 'abnormal_exit="1" ; exit' INT TERM +trap '_cleanup' EXIT +trap '' TSTP +export MAIN_PID="$$" +} +_setup_root_dir(){ +export ROOTDIR ROOT_FOLDER ROOT_FOLDER_NAME QUIET ACCOUNT_NAME CONFIG UPDATE_DEFAULT_ROOTDIR +_check_root_id(){ +_setup_root_dir_json="$(_drive_info "$(_extract_id "$ROOT_FOLDER")" "id")" +if ! rootid_setup_root_dir="$(printf "%s\n" "$_setup_root_dir_json"|_json_value id 1 1)";then +if printf "%s\n" "$_setup_root_dir_json"|grep "File not found" -q;then +"${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." 
"=" 1>&2 +else +printf "%s\n" "$_setup_root_dir_json" 1>&2 +fi +return 1 +fi +ROOT_FOLDER="$rootid_setup_root_dir" +"${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "$ROOT_FOLDER" "$CONFIG"||return 1 +return 0 +} +_check_root_id_name(){ +ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "$ROOT_FOLDER")" "name"|_json_value name 1 1||:)" +"${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" "$ROOT_FOLDER_NAME" "$CONFIG"||return 1 +return 0 +} +_set_value indirect ROOT_FOLDER "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" +_set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" +if [ -n "${ROOTDIR:-}" ];then +ROOT_FOLDER="$ROOTDIR"&&{ _check_root_id "$UPDATE_DEFAULT_ROOTDIR"||return 1;}&&unset ROOT_FOLDER_NAME +elif [ -z "$ROOT_FOLDER" ];then +{ [ -t 1 ]&&"${QUIET:-_print_center}" "normal" "Enter root folder ID or URL, press enter for default ( root )" " "&&printf -- "-> "&&read -r ROOT_FOLDER&&[ -n "$ROOT_FOLDER" ]&&{ _check_root_id _update_config||return 1;};}||{ +ROOT_FOLDER="root" +_update_config "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "$ROOT_FOLDER" "$CONFIG"||return 1 +}&&printf "\n\n" +elif [ -z "$ROOT_FOLDER_NAME" ];then +_check_root_id_name _update_config||return 1 +fi +[ -z "$ROOT_FOLDER_NAME" ]&&{ _check_root_id_name "$UPDATE_DEFAULT_ROOTDIR"||return 1;} +return 0 +} +_setup_workspace(){ +export FOLDERNAME ROOT_FOLDER ROOT_FOLDER_NAME WORKSPACE_FOLDER_ID WORKSPACE_FOLDER_NAME +if [ -z "$FOLDERNAME" ];then +WORKSPACE_FOLDER_ID="$ROOT_FOLDER" +WORKSPACE_FOLDER_NAME="$ROOT_FOLDER_NAME" +else +WORKSPACE_FOLDER_ID="$(_create_directory "$FOLDERNAME" "$ROOT_FOLDER")"||{ printf "%s\n" "$WORKSPACE_FOLDER_ID" 1>&2&&return 1;} +WORKSPACE_FOLDER_NAME="$(_drive_info "$WORKSPACE_FOLDER_ID" name|_json_value name 1 1)"||{ printf "%s\n" "$WORKSPACE_FOLDER_NAME" 1>&2&&return 1;} +fi +return 0 +} +_process_arguments(){ +export SHARE SHARE_ROLE SHARE_EMAIL HIDE_INFO QUIET SKIP_DUPLICATES OVERWRITE \ +WORKSPACE_FOLDER_ID SOURCE_UTILS EXTRA_LOG SKIP_SUBDIRS 
INCLUDE_FILES EXCLUDE_FILES \ +QUIET PARALLEL_UPLOAD VERBOSE VERBOSE_PROGRESS CHECK_MODE DESCRIPTION DESCRIPTION_ALL \ +UPLOAD_MODE HIDE_INFO +_share_and_print_link(){ +"${SHARE:-:}" "${1:-}" "$SHARE_ROLE" "$SHARE_EMAIL" +[ -z "$HIDE_INFO" ]&&{ +_print_center "justify" "DriveLink" "${SHARE:+ (SHARED[$(printf "%.1s" "$SHARE_ROLE")])}" "-" +_support_ansi_escapes&&[ "$((COLUMNS))" -gt 45 ] 2>|/dev/null&&_print_center "normal" '^ ^ ^' ' ' +"${QUIET:-_print_center}" "normal" "https://drive.google.com/open?id=${1:-}" " " +} +return 0 +} +_SEEN="" index_process_arguments=0 +TOTAL_FILE_INPUTS="$((TOTAL_FILE_INPUTS<0?0:TOTAL_FILE_INPUTS))" +until [ "$index_process_arguments" -eq "$TOTAL_FILE_INPUTS" ];do +input="" +_set_value i input "INPUT_FILE_$((index_process_arguments+=1))" +case "$_SEEN" in +*"$input"*)continue;; +*)_SEEN="$_SEEN$input" +esac +if [ -f "$input" ];then +export DESCRIPTION_FILE="$DESCRIPTION" +_print_center "justify" "Given Input" ": FILE" "=" +_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "="&&_newline "\n" +_upload_file_main noparse "$input" "$WORKSPACE_FOLDER_ID" +if [ "${RETURN_STATUS:-}" = 1 ];then +_share_and_print_link "${FILE_ID:-}" +printf "\n" +else +for _ in 1 2;do _clear_line 1;done&&continue +fi +elif [ -d "$input" ];then +input="$(cd "$input"&&pwd)"||return 1 +unset EMPTY +export DESCRIPTION_FILE="${DESCRIPTION_ALL+:$DESCRIPTION}" +_print_center "justify" "Given Input" ": FOLDER" "-" +_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "="&&_newline "\n" +FOLDER_NAME="${input##*/}"&&"$EXTRA_LOG" "justify" "Folder: $FOLDER_NAME" "=" +NEXTROOTDIRID="$WORKSPACE_FOLDER_ID" +"$EXTRA_LOG" "justify" "Processing folder.." "-" +[ -z "$SKIP_SUBDIRS" ]&&"$EXTRA_LOG" "justify" "Indexing subfolders.." 
"-" +DIRNAMES="$(find "$input" -type d -not -empty)" +NO_OF_FOLDERS="$(($(printf "%s\n" "$DIRNAMES"|wc -l)))"&&NO_OF_SUB_FOLDERS="$((NO_OF_FOLDERS-1))" +[ -z "$SKIP_SUBDIRS" ]&&_clear_line 1 +[ "$NO_OF_SUB_FOLDERS" = 0 ]&&SKIP_SUBDIRS="true" +"$EXTRA_LOG" "justify" "Indexing files.." "-" +FILENAMES="$(_tmp='find "'$input'" -type f -name "*" '$INCLUDE_FILES' '$EXCLUDE_FILES''&&eval "$_tmp")" +_clear_line 1 +if [ -n "$SKIP_SUBDIRS" ];then +if [ -n "$FILENAMES" ];then +NO_OF_FILES="$(($(printf "%s\n" "$FILENAMES"|wc -l)))" +for _ in 1 2;do _clear_line 1;done +"${QUIET:-_print_center}" "justify" "Folder: $FOLDER_NAME " "| $NO_OF_FILES File(s)" "="&&printf "\n" +"$EXTRA_LOG" "justify" "Creating folder.." "-" +{ ID="$(_create_directory "$input" "$NEXTROOTDIRID")"&&export ID;}||{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-"&&printf "%s\n\n\n" "$ID" 1>&2&&continue;} +_clear_line 1&&DIRIDS="$ID" +[ -z "${PARALLEL_UPLOAD:-${VERBOSE:-$VERBOSE_PROGRESS}}" ]&&_newline "\n" +_upload_folder "${PARALLEL_UPLOAD:-normal}" noparse "$FILENAMES" "$ID" +[ -n "${PARALLEL_UPLOAD:+${VERBOSE:-$VERBOSE_PROGRESS}}" ]&&_newline "\n\n" +else +for _ in 1 2;do _clear_line 1;done&&EMPTY=1 +fi +else +if [ -n "$FILENAMES" ];then +NO_OF_FILES="$(($(printf "%s\n" "$FILENAMES"|wc -l)))" +for _ in 1 2;do _clear_line 1;done +"${QUIET:-_print_center}" "justify" "$FOLDER_NAME " "| $((NO_OF_FILES)) File(s) | $((NO_OF_SUB_FOLDERS)) Sub-folders" "=" +_newline "\n"&&"$EXTRA_LOG" "justify" "Creating Folder(s).." 
"-"&&_newline "\n" +unset status +while read -r dir <&4&&{ [ -n "$dir" ]||continue;};do +[ -n "$status" ]&&__dir="$(_dirname "$dir")"&&__temp="$(printf "%s\n" "$DIRIDS"|grep -F "|:_//_:|$__dir|:_//_:|")"&&NEXTROOTDIRID="${__temp%%"|:_//_:|$__dir|:_//_:|"}" +NEWDIR="${dir##*/}"&&_print_center "justify" "Name: $NEWDIR" "-" 1>&2 +ID="$(_create_directory "$NEWDIR" "$NEXTROOTDIRID")"||{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-"&&printf "%s\n\n\n" "$ID" 1>&2&&continue;} +DIRIDS="$(printf "%b%s|:_//_:|%s|:_//_:|\n" "${DIRIDS:+$DIRIDS\n}" "$ID" "$dir")" +for _ in 1 2;do _clear_line 1 1>&2;done +"$EXTRA_LOG" "justify" "Status" ": $((status+=1)) / $((NO_OF_FOLDERS))" "=" 1>&2 +done 4<>"$log_file_name" +printf "%s\n" "To see the failed files, open \"$log_file_name\"" +printf "%s\n" "To retry the failed uploads only, use -d / --skip-duplicates flag. See log file for more help." +} +else +printf "%s\n" "$ERROR_FILES" +fi +} +printf "\n" +else +for _ in 1 2 3;do _clear_line 1;done +"${QUIET:-_print_center}" 'justify' "Empty Folder" ": $FOLDER_NAME" "=" 1>&2 +printf "\n" +fi +fi +done +_SEEN="" index_process_arguments=0 +TOTAL_ID_INPUTS="$((TOTAL_ID_INPUTS<0?0:TOTAL_ID_INPUTS))" +until [ "$index_process_arguments" -eq "$TOTAL_ID_INPUTS" ];do +gdrive_id="" +_set_value gdrive_id "INPUT_ID_$((index_process_arguments+=1))" +case "$_SEEN" in +*"$gdrive_id"*)continue;; +*)_SEEN="$_SEEN$gdrive_id" +esac +_print_center "justify" "Given Input" ": ID" "=" +"$EXTRA_LOG" "justify" "Checking if id exists.." "-" +[ "$CHECK_MODE" = "md5Checksum" ]&¶m="md5Checksum" +json="$(_drive_info "$gdrive_id" "name,mimeType,size${param:+,$param}")"||: +if ! 
printf "%s\n" "$json"|_json_value code 1 1 2>|/dev/null 1>&2;then +type="$(printf "%s\n" "$json"|_json_value mimeType 1 1||:)" +name="$(printf "%s\n" "$json"|_json_value name 1 1||:)" +size="$(printf "%s\n" "$json"|_json_value size 1 1||:)" +[ "$CHECK_MODE" = "md5Checksum" ]&&md5="$(printf "%s\n" "$json"|_json_value md5Checksum 1 1||:)" +for _ in 1 2;do _clear_line 1;done +case "$type" in +*folder*)export \ +DESCRIPTION_FILE="${DESCRIPTION_ALL+:$DESCRIPTION}" +"${QUIET:-_print_center}" "justify" "Folder not supported." "=" 1>&2&&_newline "\n" 1>&2&&continue +;; +*)export \ +DESCRIPTION_FILE="$DESCRIPTION" +_print_center "justify" "Given Input" ": File ID" "=" +_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "="&&_newline "\n" +_clone_file "${UPLOAD_MODE:-create}" "$gdrive_id" "$WORKSPACE_FOLDER_ID" "$name" "$size" "$md5"||{ for _ in 1 2;do _clear_line 1;done&&continue;} +esac +_share_and_print_link "$FILE_ID" +printf "\n" +else +_clear_line 1 +"${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-gdrive_id})" " invalid." "=" 1>&2 +printf "\n" +fi +done +return 0 +} +_main_helper(){ +_setup_arguments "$@"||exit 1 +"${SKIP_INTERNET_CHECK:-_check_internet}"||exit 1 +TMPFILE="$(command -v mktemp 1>|/dev/null&&mktemp -u)"||TMPFILE="$(pwd)/.$(_t="$(_epoch)"&&printf "%s\n" "$((_t*_t))").tmpfile" +export TMPFILE +_setup_traps +"$EXTRA_LOG" "justify" "Checking credentials.." "-" +{ _check_credentials&&_clear_line 1;}||{ "${QUIET:-_print_center}" "normal" "[ Error: Credentials checking failed ]" "="&&exit 1;} +"${QUIET:-_print_center}" "normal" " Account: $ACCOUNT_NAME " "=" +"$EXTRA_LOG" "justify" "Checking root dir.." "-" +{ _setup_root_dir&&_clear_line 1;}||{ "${QUIET:-_print_center}" "normal" "[ Error: Rootdir setup failed ]" "="&&exit 1;} +_print_center "justify" "Root dir properly configured." "=" +[ -n "$CONTINUE_WITH_NO_INPUT" ]&&exit 0 +"$EXTRA_LOG" "justify" "Checking Workspace Folder.." 
"-" +{ _setup_workspace&&for _ in 1 2;do _clear_line 1;done;}||{ "${QUIET:-_print_center}" "normal" "[ Error: Workspace setup failed ]" "="&&exit 1;} +_print_center "justify" "Workspace Folder: $WORKSPACE_FOLDER_NAME" "=" +"${HIDE_INFO:-_print_center}" "normal" " $WORKSPACE_FOLDER_ID " "-"&&_newline "\n" +START="$(_epoch)" +[ -n "$SUPPORT_ANSI_ESCAPES" ]&&printf "\033[?25l" +_process_arguments +END="$(_epoch)" +DIFF="$((END-START))" +"${QUIET:-_print_center}" "normal" " Time Elapsed: ""$((DIFF/60))"" minute(s) and ""$((DIFF%60))"" seconds. " "=" +} +set +a +main(){ +[[ $# == 0 ]]&&{ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +[[ -z $SELF_SOURCE ]]&&{ +set -a +export UTILS_FOLDER="${UTILS_FOLDER:-$PWD}" +export COMMON_PATH="$UTILS_FOLDER/common" +{ . "$UTILS_FOLDER/bash/common-utils.bash"&&. "$COMMON_PATH/parser.sh"&&. "$COMMON_PATH/upload-flags.sh"&&. "$COMMON_PATH/auth-utils.sh"&&. "$COMMON_PATH/common-utils.sh"&&. "$COMMON_PATH/drive-utils.sh"&&. "$COMMON_PATH/upload-utils.sh"&&. 
"$COMMON_PATH/upload-common.sh";}||{ printf "Error: Unable to source util files.\n"&&exit 1;} +set +a +} +export SOURCE_UTILS="" +[[ ${BASH_VERSINFO:-0} -ge 4 ]]||{ printf "Bash version lower than 4.x not supported.\n"&&return 1;} +set -o noclobber -o pipefail||exit 1 +export _SCRIPT_KILL_SIGNAL="--" +_main_helper "$@"||exit 1 +} +{ [[ -z $SOURCED_GUPLOAD ]]&&main "$@";}||: diff --git a/release/sh/gsync b/release/sh/gsync new file mode 100755 index 0000000..edcac7e --- /dev/null +++ b/release/sh/gsync @@ -0,0 +1,1003 @@ +#!/usr/bin/env sh +SELF_SOURCE="true" +set -a +_assert_regex(){ +grep -qE "${1:?Error: Missing pattern}" 0<&1';}||{ command -v zsh 1>|/dev/null&&zsh -c 'printf "%s\n" "${COLUMNS}"';}||{ command -v stty 1>|/dev/null&&_tmp="$(stty size)"&&printf "%s\n" "${_tmp##* }";}||{ command -v tput 1>|/dev/null&&tput cols;})"||: +[ "$((COLUMNS))" -gt 45 ]&&return 0 +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)eval export "$2"=\"\$"$3"\";; +*)return 1 +esac +} +_url_encode()(\ +LC_ALL=C \ +LANG=C +awk 'BEGIN {while (y++ < 125) z[sprintf("%c", y)] = y + while (y = substr(ARGV[1], ++j, 1)) + q = y ~ /[[:alnum:]]_.!~*\47()-]/ ? q y : q sprintf("%%%02X", z[y]) + print q}' "$1") +_is_fd_open(){ +for fd in ${1:?};do +if ! { true >&"$fd";} 2<>/dev/null;then +printf "%s\n" "Error: fd $fd not open." +return 1 +fi +done +} +_parser_add_help(){ +_PARSER_ALL_HELP="$_PARSER_ALL_HELP +${__PARSER_BAR:-} +${1:-}" 2>|/dev/null +} +_parser_check_arguments(){ +nargs_parser_check_arguments="$((${1:?_parser_check_arguments}))" +num_parser_check_arguments=$(($#-2)) +[ "$num_parser_check_arguments" -lt "$nargs_parser_check_arguments" ]&&{ +printf "%s\n" "${0##*/}: $2: flag requires $nargs_parser_check_arguments argument." 
+printf "\n%s\n" "Help:" +printf "%s\n" "$(_usage "$2")" +exit 1 +} +return 0 +} +_flag_exists(){ +tmp_flag_exists="" option_flag_exists="" +_flag_help "${1:?}" tmp_flag_exists option_flag_exists +[ -z "$tmp_flag_exists" ]&&return 1 +_set_value d "${2:?}" "$option_flag_exists" +} +_flag_help(){ +flag_flag_help="" +_trim "-" "${1:?_flag_help}" flag_flag_help +_set_value i "${2:?_flag_help}" "_parser__help_$flag_flag_help" +_set_value d "${3:-_}" "$flag_flag_help" +} +_parse_arguments(){ +__NEWLINE=" +" +_parse_support_ansi_escapes(){ +case "$TERM" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*){ [ -t 2 ]&&return 0;}||return 1;; +*): +esac +{ [ -t 2 ]&&return 0;}||return 1 +} +_parser_required_column_size(){ +COLUMNS="$({ command -v bash 1>|/dev/null&&bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1';}||{ command -v zsh 1>|/dev/null&&zsh -c 'printf "%s\n" "${COLUMNS}"';}||{ command -v stty 1>|/dev/null&&_tmp="$(stty size)"&&printf "%s\n" "${_tmp##* }";}||{ command -v tput 1>|/dev/null&&tput cols;})"||: +[ "$((COLUMNS))" -gt 45 ]&&return 0 +} +_parse_support_ansi_escapes&&_parser_required_column_size&&__PARSER_BAR="$(\ +filler='' \ +symbol='_' +i=1&&while [ "$i" -le "$COLUMNS" ];do +filler="$filler$symbol"&&i="$((i+1))" +done +printf "%s\n" "$filler")" +__PARSER_BAR="${__PARSER_BAR:+$__PARSER_BAR$__NEWLINE}" +unset _PARSER_ALL_HELP _PARSER_ARGS_SHIFT _PARSER_PREPROCESS_FUNCTION +unset _PARSER_FLAGS _PARSER_CURRENT_FLAGS _PARSER_CURRENT_NARGS _PARSER_CURRENT_ARGS _PARSER_CURRENT_ARGS_TYPE +"${1:?_parse_arguments - 1: Missing funtion name to setup flags}"||return 1 +shift 2>|/dev/null +_parser_run_preprocess||return 1 +while [ "$#" -gt 0 ];do +case "$1" in +''):;; +--)shift +while [ "$#" -gt 0 ];do +_parser_process_input "$@"||return 1 +shift +done +;; +-*)\ +flag_parse_arguments="" +if _flag_exists "$1" flag_parse_arguments;then +"_parser_process_$flag_parse_arguments" "$@"||return 1 +else +printf "%s\n\n" "${0##*/}: $1: Unknown option" 
+_short_help +fi +;; +*)_parser_process_input "$@"||return 1 +esac +_PARSER_ARGS_SHIFT="$((_PARSER_ARGS_SHIFT+1))" +shift "$_PARSER_ARGS_SHIFT" +_PARSER_ARGS_SHIFT="0" +done +return 0 +} +_parser_setup_flag(){ +_PARSER_CURRENT_FLAGS="" tmp_parser_setup_flag="" +_PARSER_FLAGS="${1:?_parser_setup_flag}" +for f in $_PARSER_FLAGS;do +_trim "-" "$f" tmp_parser_setup_flag +_PARSER_CURRENT_FLAGS="$_PARSER_CURRENT_FLAGS $tmp_parser_setup_flag" +done +_PARSER_CURRENT_NARGS="${2:?_parser_setup_flag}" +_PARSER_CURRENT_ARGS_TYPE="$3" +_PARSER_CURRENT_ARGS="$4" +} +_parser_setup_flag_help(){ +flags_parser_setup_flag_help="${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_help}" +nargs_parser_setup_flag_help="${_PARSER_CURRENT_NARGS:?_parser_setup_flag_help}" +unset start_parser_setup_flag_help \ +help_parser_setup_flag_help \ +arg_parser_setup_flag_help \ +all_parser_setup_flag_help +while IFS= read -r line <&4;do +help_parser_setup_flag_help="$help_parser_setup_flag_help + $line" +done 4<|/dev/null +} +_parser_setup_flag_preprocess(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_preprocess +while IFS= read -r line <&4;do +fn_parser_setup_flag_preprocess="$fn_parser_setup_flag_preprocess +$line" +done +_PARSER_PREPROCESS_FUNCTION="$_PARSER_PREPROCESS_FUNCTION +$fn_parser_setup_flag_preprocess" +} +_parser_setup_flag_process(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_process +if [ "${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process}" -gt 0 ]&&! 
[ "$_PARSER_CURRENT_ARGS_TYPE" = optional ];then +fn_parser_setup_flag_process="_parser_check_arguments ${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process} \"\${@}\"" +fi +while IFS= read -r line <&4;do +fn_parser_setup_flag_process="$fn_parser_setup_flag_process +$line" +done +for f in ${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_process};do +eval "_parser_process_$f() { $fn_parser_setup_flag_process ; }" +done +} +_parser_run_preprocess(){ +eval "_parser_preprocess_setup() { ${_PARSER_PREPROCESS_FUNCTION:-:} ; }"&&_parser_preprocess_setup +} +_parser_shift(){ +export _PARSER_ARGS_SHIFT="${1:-1}" +} +_short_help(){ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)eval export "$2"=\"\$"$3"\";; +*)return 1 +esac +} +_trim(){ +char_trim="$1" str_trim="$2" var_trim="$3" +set -f +old_ifs="$IFS" +IFS="$char_trim" +set -- $str_trim +IFS= +if [ -n "$var_trim" ];then +_set_value d "$var_trim" "$*" +else +printf "%s" "$*" +fi +IFS="$old_ifs" +set +f +} +_bytes_to_human(){ +b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0 +d_bytes_to_human='' type_bytes_to_human='' +while [ "$b_bytes_to_human" -gt 1024 ];do +d_bytes_to_human="$(printf ".%02d" $((b_bytes_to_human%1024*100/1024)))" +b_bytes_to_human=$((b_bytes_to_human/1024))&&s_bytes_to_human=$((s_bytes_to_human+=1)) +done +j=0&&for i in B KB MB GB TB PB EB YB ZB;do +j="$((j+=1))"&&[ "$((j-1))" = "$s_bytes_to_human" ]&&type_bytes_to_human="$i"&&break +continue +done +printf "%s\n" "$b_bytes_to_human$d_bytes_to_human $type_bytes_to_human" +} +_check_debug(){ +export DEBUG QUIET +if [ -n "$DEBUG" ];then +set -x&&PS4='-> ' +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";};} +_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +else +if [ -z "$QUIET" ];then +if _support_ansi_escapes;then +if ! 
_required_column_size;then +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +fi +export EXTRA_LOG="_print_center" CURL_PROGRESS="-#" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" +else +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +_clear_line(){ :;}&&_move_cursor(){ :;} +fi +_newline(){ printf "%b" "$1";} +else +_print_center(){ :;}&&_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +fi +set +x +fi +} +_check_internet(){ +"${EXTRA_LOG:-}" "justify" "Checking Internet Connection.." "-" +if ! _timeout 10 curl -Is google.com --compressed;then +_clear_line 1 +"${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" +return 1 +fi +_clear_line 1 +} +_clear_line(){ +printf "\033[%sA\033[2K" "$1" +} +_dirname(){ +dir_dirname="${1:-.}" +dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}"&&[ -n "${dir_dirname##*/*}" ]&&dir_dirname=. +dir_dirname="${dir_dirname%/*}"&&dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" +printf '%s\n' "${dir_dirname:-/}" +} +_display_time(){ +t_display_time="$1" day_display_time="$((t_display_time/60/60/24))" +hr_display_time="$((t_display_time/60/60%24))" min_display_time="$((t_display_time/60%60))" sec_display_time="$((t_display_time%60))" +[ "$day_display_time" -gt 0 ]&&printf '%d days ' "$day_display_time" +[ "$hr_display_time" -gt 0 ]&&printf '%d hrs ' "$hr_display_time" +[ "$min_display_time" -gt 0 ]&&printf '%d minute(s) ' "$min_display_time" +[ "$day_display_time" -gt 0 ]||[ "$hr_display_time" -gt 0 ]||[ "$min_display_time" -gt 0 ]&&printf 'and ' +printf '%d seconds\n' "$sec_display_time" +} +_get_latest_sha(){ +export TYPE TYPE_VALUE REPO +unset latest_sha_get_latest_sha raw_get_latest_sha +case "${1:-$TYPE}" in +branch)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl --compressed -s https://github.com/"${3:-$REPO}"/commits/"${2:-$TYPE_VALUE}".atom -r 0-2000)" +_tmp="$(printf "%s\n" 
"$raw_get_latest_sha"|grep -o 'Commit\/.*<' -m1||:)"&&_tmp="${_tmp##*\/}"&&printf "%s\n" "${_tmp%%<*}")" +;; +release)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl -L --compressed -s https://github.com/"${3:-$REPO}"/releases/"${2:-$TYPE_VALUE}")" +_tmp="$(printf "%s\n" "$raw_get_latest_sha"|grep '="/'"${3:-$REPO}""/commit" -m1||:)"&&_tmp="${_tmp##*commit\/}"&&printf "%s\n" "${_tmp%%\"*}")" +;; +*): +esac +printf "%b" "${latest_sha_get_latest_sha:+$latest_sha_get_latest_sha\n}" +} +_json_escape(){ +mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" +if [ "$mode_json_escape" = "j" ];then +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s|\\\|\\\\\\\|g" \ +-e "s|\/|\\\/|g" \ +-e 's/\"/\\\"/g' \ +-e "s/$(printf '\t')/\\t/g" \ +-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +else +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s/$(printf '\t')/\\t/g" \ +-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +fi +output_json_escape="$(printf "%s" "$output_json_escape"|awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" +printf "%s" "$output_json_escape" +} +_json_value(){ +{ [ "$2" -gt 0 ] 2>|/dev/null&&no_of_lines_json_value="$2";}||: +{ [ "$3" -gt 0 ] 2>|/dev/null&&num_json_value="$3";}||{ ! [ "$3" = all ]&&num_json_value=1;} +_tmp="$(grep -o "\"$1\"\:.*" ${no_of_lines_json_value:+-m} $no_of_lines_json_value)"||return 1 +printf "%s\n" "$_tmp"|sed -e 's|.*"'"$1""\":||" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "$num_json_value"p||: +return 0 +} +_parse_config(){ +_config_file_parse_config="${1:?Error: Profile config file}" +print_parse_config="${2:-false}" +[ -r "$_config_file_parse_config" ]||{ +printf "%s\n" "Error: Given config file ( $_config_file_parse_config ) is not readable." 
+return 1 +} +while IFS='=' read -r key val;do +{ [ -n "$key" ]&&[ -n "$val" ]&&[ -n "${key##\#*}" ];}||continue +key="${key#"${key%%[![:space:]]*}"}" +val="${val#"${val%%[![:space:]]*}"}" +key="${key%"${key##*[![:space:]]}"}" +val="${val%"${val##*[![:space:]]}"}" +case "$val" in +\"*\")val="${val#\"}" val="${val%\"}";; +\'*\')val="${val#\'}" val="${val%\'}";; +*): +esac +export "$key=$val" 2>/dev/null||printf "%s\n" "Warning: $key is not a valid variable name." +[ "$print_parse_config" = true ]&&echo "$key=$val" +done <"$_config_file_parse_config" +return 0 +} +_print_center(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +term_cols_print_center="${COLUMNS:-}" +type_print_center="$1" filler_print_center="" +case "$type_print_center" in +normal)out_print_center="$2"&&symbol_print_center="$3";; +justify)if +[ $# = 3 ] +then +input1_print_center="$2" symbol_print_center="$3" to_print_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center-5))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&out_print_center="[ $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..]";}||{ out_print_center="[ $input1_print_center ]";} +else +input1_print_center="$2" input2_print_center="$3" symbol_print_center="$4" to_print_print_center="" temp_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center*47/100))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&temp_print_center=" $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..";}||{ temp_print_center=" $input1_print_center";} +to_print_print_center="$((term_cols_print_center*46/100))" +{ [ "${#input2_print_center}" -gt "$to_print_print_center" ]&&temp_print_center="$temp_print_center$(printf "%.${to_print_print_center}s\n" "$input2_print_center").. 
";}||{ temp_print_center="$temp_print_center$input2_print_center ";} +out_print_center="[$temp_print_center]" +fi +;; +*)return 1 +esac +str_len_print_center="${#out_print_center}" +[ "$str_len_print_center" -ge "$((term_cols_print_center-1))" ]&&{ +printf "%s\n" "$out_print_center"&&return 0 +} +filler_print_center_len="$(((term_cols_print_center-str_len_print_center)/2))" +i_print_center=1&&while [ "$i_print_center" -le "$filler_print_center_len" ];do +filler_print_center="$filler_print_center$symbol_print_center"&&i_print_center="$((i_print_center+1))" +done +printf "%s%s%s" "$filler_print_center" "$out_print_center" "$filler_print_center" +[ "$(((term_cols_print_center-str_len_print_center)%2))" -ne 0 ]&&printf "%s" "$symbol_print_center" +printf "\n" +return 0 +} +_print_center_quiet(){ +{ [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";} +} +_support_ansi_escapes(){ +unset ansi_escapes +case "${TERM:-}" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*)ansi_escapes="true";; +*): +esac +{ [ -t 2 ]&&[ -n "$ansi_escapes" ]&&return 0;}||return 1 +} +_timeout(){ +timeout_timeout="${1:?Error: Specify Timeout}"&&shift +{ +"$@"& +child="$!" +trap -- "" TERM +{ +sleep "$timeout_timeout" +kill -9 "$child" +}& +wait "$child" +} 2>|/dev/null 1>&2 +} +_update_config(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +value_name_update_config="$1" value_update_config="$2" config_path_update_config="$3" +! [ -f "$config_path_update_config" ]&&: >|"$config_path_update_config" +chmod u+w -- "$config_path_update_config"||return 1 +printf "%s\n%s\n" "$(grep -v -e "^$" -e "^$value_name_update_config=" -- "$config_path_update_config"||:)" \ +"$value_name_update_config=\"$value_update_config\"" >|"$config_path_update_config"||return 1 +chmod a-w-r-x,u+r -- "$config_path_update_config"||return 1 +return 0 +} +set +a +_usage(){ +printf "%b" " +The script can be used to sync your local folder to google drive. 
+ +Utilizes google-drive-upload bash scripts.\n +Usage: ${0##*/} [options.. ]\n +Options:\n + -d | --directory - Gdrive foldername.\n + -k | --kill - to kill the background job using pid number ( -p flags ) or used with input, can be used multiple times.\n + -j | --jobs - See all background jobs that were started and still running.\n + Use --jobs v/verbose to more information for jobs.\n + -p | --pid - Specify a pid number, used for --jobs or --kill or --info flags, can be used multiple times.\n + -i | --info - See information about a specific sync using pid_number ( use -p flag ) or use with input, can be used multiple times.\n + -t | --time - Amount of time to wait before try to sync again in background.\n + To set wait time by default, use ${0##*/} -t default='3'. Replace 3 with any positive integer.\n + -l | --logs - To show the logs after starting a job or show log of existing job. Can be used with pid number ( -p flag ). + Note: If multiple pid numbers or inputs are used, then will only show log of first input as it goes on forever. + -a | --arguments - Additional arguments for gupload commands. e.g: ${0##*/} -a '-q -o -p 4 -d'.\n + To set some arguments by default, use ${0##*/} -a default='-q -o -p 4 -d'.\n + -fg | --foreground - This will run the job in foreground and show the logs.\n + -in | --include 'pattern' - Only include the files with the given pattern to upload.\n + e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n + -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading.\n + e.g: ${0##*/} local_folder --exclude "*1*", will exclude all files with pattern '1' in the name.\n + -c | --command 'command name'- Incase if gupload command installed with any other name or to use in systemd service.\n + --sync-detail-dir 'dirname' - Directory where a job information will be stored. 
+ Default: $HOME/.google-drive-upload\n + -s | --service 'service name' - To generate systemd service file to setup background jobs on boot.\n + -D | --debug - Display script command trace, use before all the flags to see maximum script trace.\n + -h | --help - Display usage instructions.\n" +exit 0 +} +_short_help(){ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +_check_pid(){ +{ ps -p "$1" 2>|/dev/null 1>&2&&return 0;}||return 1 +} +_get_job_info(){ +unset local_folder_get_job_info times_get_job_info extra_get_job_info +pid_get_job_info="$1"&&input_get_job_info="${3:-$(grep "$pid_get_job_info" "$SYNC_LIST"||:)}" +if [ -n "$input_get_job_info" ];then +if times_get_job_info="$(ps -p "$pid_get_job_info" -o etimes --no-headers)";then +printf "\n%s\n" "PID: $pid_get_job_info" +_tmp="${input_get_job_info#*"|:_//_:|"}"&&local_folder_get_job_info="${_tmp%%"|:_//_:|"*}" +printf "Local Folder: %s\n" "$local_folder_get_job_info" +printf "Drive Folder: %s\n" "${input_get_job_info##*"|:_//_:|"}" +printf "Running Since: %s\n" "$(_display_time "$times_get_job_info")" +[ -n "$2" ]&&{ +extra_get_job_info="$(ps -p "$pid_get_job_info" -o %cpu,%mem --no-headers||:)" +printf "CPU usage:%s\n" "${extra_get_job_info% *}" +printf "Memory usage: %s\n" "${extra_get_job_info##* }" +_setup_loop_variables "$local_folder_get_job_info" "${input_get_job_info##*"|:_//_:|"}" +printf "Success: %s\n" "$(($(wc -l <"$SUCCESS_LOG")))" +printf "Failed: %s\n" "$(($(wc -l <"$ERROR_LOG")))" +} +RETURN_STATUS=0 +else +RETURN_STATUS=1 +fi +else +RETURN_STATUS=11 +fi +return 0 +} +_remove_job(){ +unset input_remove_job local_folder_remove_job drive_folder_remove_job new_list_remove_job +pid_remove_job="$1" +if [ -n "$pid_remove_job" ];then +input_remove_job="$(grep "$pid_remove_job" "$SYNC_LIST"||:)" +_tmp="${input_remove_job#*"|:_//_:|"}"&&local_folder_remove_job="${_tmp%%"|:_//_:|"*}" +drive_folder_remove_job="${input_remove_job##*"|:_//_:|"}" 
+new_list_remove_job="$(grep -v "$pid_remove_job" "$SYNC_LIST"||:)" +printf "%s\n" "$new_list_remove_job" >|"$SYNC_LIST" +fi +rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-$2}${local_folder_remove_job:-$3}" +{ [ -z "$(find "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-$2}" -type f)" ]&&rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-$2}";} 2>|/dev/null 1>&2 +return 0 +} +_kill_job(){ +pid_kill_job="$1" +kill -9 "$pid_kill_job" 2>|/dev/null 1>&2||: +_remove_job "$pid_kill_job" +printf "Killed.\n" +} +_show_jobs(){ +unset list_show_job pid_show_job no_task_show_job +total_show_job=0 list_show_job="$(grep -v '^$' "$SYNC_LIST"||:)" +printf "%s\n" "$list_show_job" >|"$SYNC_LIST" +while read -r line <&4;do +if [ -n "$line" ];then +_tmp="${line%%"|:_//_:|"*}"&&pid_show_job="${_tmp##*: }" +_get_job_info "$pid_show_job" "$1" "$line" +if [ "$RETURN_STATUS" = 1 ];then +_remove_job "$pid_show_job" +else +total_show_job="$((total_show_job+1))"&&no_task_show_job="printf" +fi +fi +done 4<"$SYNC_LIST" +printf "\nTotal Jobs Running: %s\n" "$total_show_job" +[ -z "$1" ]&&"${no_task_show_job:-:}" "For more info: %s -j/--jobs v/verbose\n" "${0##*/}" +return 0 +} +_setup_loop_variables(){ +folder_setup_loop_variables="$1" drive_folder_setup_loop_variables="$2" +DIRECTORY="$SYNC_DETAIL_DIR/$drive_folder_setup_loop_variables$folder_setup_loop_variables" +PID_FILE="$DIRECTORY/pid" +SUCCESS_LOG="$DIRECTORY/success_list" +ERROR_LOG="$DIRECTORY/failed_list" +LOGS="$DIRECTORY/logs" +} +_setup_loop_files(){ +mkdir -p "$DIRECTORY" +for file in PID_FILE SUCCESS_LOG ERROR_LOG;do +printf "" >>"$(eval printf "%s" \"\$"$file"\")" +done +PID="$(cat "$PID_FILE")" +} +_check_and_upload(){ +unset all_check_and_upload initial_check_and_upload new_files_check_and_upload new_file_check_and_upload aseen_check_and_upload +initial_check_and_upload="$(cat "$SUCCESS_LOG")" +all_check_and_upload="$(cat "$SUCCESS_LOG" "$ERROR_LOG")" +[ "$(printf "%b\n" ./*)" = "./*" ]&&return 0 
+all_check_and_upload="$all_check_and_upload +$(_tmp='printf -- "%b\n" * '${INCLUDE_FILES:+| grep -E $INCLUDE_FILES}''&&eval "$_tmp")" +exec 5<|"$ERROR_LOG"&&{ +while read -r new_file_check_and_upload <&4&&case "$aseen_check_and_upload" in +*"|:_//_:|$new_file_check_and_upload|:_//_:|"*)continue;; +*)aseen_check_and_upload="$aseen_check_and_upload|:_//_:|$new_file_check_and_upload|:_//_:|" +esac;do +if eval "\"$COMMAND_PATH\"" "\"$new_file_check_and_upload\"" "$ARGS";then +printf "%s\n" "$new_file_check_and_upload" >>"$SUCCESS_LOG" +else +printf "%s\n" "$new_file_check_and_upload" >>"$ERROR_LOG" +printf "%s\n" "Error: Input - $new_file_check_and_upload" +fi +printf "\n" +done 4<|"$LOGS" 1>&2)& +PID="$!" +printf "%s\n" "$PID" >|"$PID_FILE" +printf "%b\n" "Local Folder: $INPUT\nDrive Folder: $GDRIVE_FOLDER\nPID: $PID" +printf "%b\n" "PID: $PID|:_//_:|$FOLDER|:_//_:|$GDRIVE_FOLDER" >>"$SYNC_LIST" +[ -n "$SHOW_LOGS" ]&&printf "\n"&&tail -f "$LOGS" +fi +return 0 +} +_do_job(){ +case "$JOB" in +*SHOW_JOBS*)_show_jobs "${SHOW_JOBS_VERBOSE:-}" +exit +;; +*KILL_ALL*)\ +PIDS="$(_show_jobs|grep -o 'PID:.*[0-9]'|sed "s/PID: //g"||:)"&&\ +total=0 +[ -n "$PIDS" ]&&{ +for _pid in $PIDS;do +printf "PID: %s - " "${_pid##* }" +_kill_job "${_pid##* }" +total="$((total+1))" +done +} +printf "\nTotal Jobs Killed: %s\n" "$total" +exit +;; +*PIDS*)unset Aseen&&while +read -r pid <&4&&{ [ -n "$pid" ]||continue;}&&case "$Aseen" in +*"|:_//_:|$pid|:_//_:|"*)continue;; +*)Aseen="$Aseen|:_//_:|$pid|:_//_:|" +esac +do +case "$JOB_TYPE" in +*INFO*)_get_job_info "$pid" more +[ "$RETURN_STATUS" -gt 0 ]&&{ +[ "$RETURN_STATUS" = 1 ]&&_remove_job "$pid" +printf "No job running with given PID ( %s ).\n" "$pid" 1>&2 +} +;; +*) +esac +case "$JOB_TYPE" in +*SHOW_LOGS*)\ +input="$(grep "$pid" "$SYNC_LIST"||:)" +if [ -n "$input" ];then +_check_pid "$pid"&&{ +_tmp="${input#*"|:_//_:|"}"&&local_folder="${_tmp%%"|:_//_:|"*/}" +_setup_loop_variables "$local_folder" "${input##*"|:_//_:|"/}" +tail -f "$LOGS" +} 
+else +printf "No job running with given PID ( %s ).\n" "$pid" 1>&2 +fi +;; +*) +esac +case "$JOB_TYPE" in +*KILL*)_get_job_info "$pid" +if [ "$RETURN_STATUS" = 0 ];then +_kill_job "$pid" +else +[ "$RETURN_STATUS" = 1 ]&&_remove_job "$pid" +printf "No job running with given PID ( %s ).\n" "$pid" 1>&2 +fi +;; +*) +esac +done 4<|/dev/null 1>&2;then +ALL_PIDS="$ALL_PIDS + $2"&&shift +JOB=" $JOBS PIDS " +else +printf "%s\n" "-p/--pid only takes postive integer as arguments." +exit 1 +fi +;; +-i|--info)JOB_TYPE=" $JOB_TYPE INFO "&&INFO="true";; +-k|--kill)\ +JOB_TYPE=" $JOB_TYPE KILL "&&\ +KILL="true" +[ "$2" = all ]&&JOB="KILL_ALL"&&shift +;; +-l|--logs)JOB_TYPE=" $JOB_TYPE SHOW_LOGS "&&SHOW_LOGS="true";; +-t|--time)_check_longoptions "$1" "$2" +if [ "$2" -gt 0 ] 2>|/dev/null 1>&2;then +case "$2" in +default*)UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config";; +*) +esac +TO_SLEEP="${2##default=/}"&&shift +else +printf "%s\n" "-t/--time only takes positive integers as arguments, min = 1, max = infinity." 
+exit 1 +fi +;; +-a|--arguments)_check_longoptions "$1" "$2" +case "$2" in +default*)UPDATE_DEFAULT_ARGS="_update_config";; +*) +esac +ARGS=" $ARGS ${2##default=} "&&shift +;; +-fg|--foreground)FOREGROUND="true"&&SHOW_LOGS="true";; +-in|--include)_check_longoptions "$1" "$2" +INCLUDE_FILES="$INCLUDE_FILES -e '$2' "&&shift +;; +-ex|--exclude)_check_longoptions "$1" "$2" +EXCLUDE_FILES="$EXCLUDE_FILES -e '$2' "&&shift +;; +-c|--command)_check_longoptions "$1" "$2" +CUSTOM_COMMAND_NAME="$2"&&shift +;; +--sync-detail-dir)_check_longoptions "$1" "$2" +SYNC_DETAIL_DIR="$2"&&shift +;; +-s|--service)_check_longoptions "$1" "$2" +SERVICE_NAME="$2"&&shift +CREATE_SERVICE="true" +;; +*)case "$1" in +-*)printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "$1" "${0##*/}"&&exit 1;; +*)\ +FINAL_INPUT_ARRAY="$FINAL_INPUT_ARRAY + $1" +esac +esac +shift +done +INFO_PATH="$HOME/.google-drive-upload" +CONFIG_INFO="$INFO_PATH/google-drive-upload.configpath" +[ -f "$CONFIG_INFO" ]&&. 
"$CONFIG_INFO" +CONFIG="${CONFIG:-$HOME/.googledrive.conf}" +SYNC_DETAIL_DIR="${SYNC_DETAIL_DIR:-$INFO_PATH/sync}" +SYNC_LIST="$SYNC_DETAIL_DIR/sync_list" +mkdir -p "$SYNC_DETAIL_DIR"&&printf "" >>"$SYNC_LIST" +_do_job +[ -z "$FINAL_INPUT_ARRAY" ]&&_short_help +return 0 +} +_config_variables(){ +COMMAND_NAME="${CUSTOM_COMMAND_NAME:-$COMMAND_NAME}" +VALUES_LIST="REPO COMMAND_NAME SYNC_COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE" +VALUES_REGEX=""&&for i in $VALUES_LIST;do +VALUES_REGEX="${VALUES_REGEX:+$VALUES_REGEX|}^$i=\".*\".* # added values" +done +{ +COMMAND_PATH="$(command -v "$COMMAND_NAME")" 1>/dev/null&&SCRIPT_VALUES="$(grep -E "$VALUES_REGEX|^SELF_SOURCE=\".*\"" "$COMMAND_PATH"||:)"&&eval "$SCRIPT_VALUES"&&[ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+$TYPE_VALUE}}}}" ]&&unset SOURCED_GUPLOAD +}||{ printf "Error: %s is not installed, use -c/--command to specify.\n" "$COMMAND_NAME" 1>&2&&exit 1;} +ARGS=" $ARGS -q " +SYNC_TIME_TO_SLEEP="3" +[ -r "$CONFIG" ]&&. "$CONFIG" +SYNC_TIME_TO_SLEEP="${TO_SLEEP:-$SYNC_TIME_TO_SLEEP}" +ARGS=" $ARGS ${SYNC_DEFAULT_ARGS:-} " +"${UPDATE_DEFAULT_ARGS:-:}" SYNC_DEFAULT_ARGS " $ARGS " "$CONFIG" +"${UPDATE_DEFAULT_TIME_TO_SLEEP:-:}" SYNC_TIME_TO_SLEEP "$SYNC_TIME_TO_SLEEP" "$CONFIG" +return 0 +} +_systemd_service_contents(){ +username_systemd_service_contents="${LOGNAME:?Give username}" install_path_systemd_service_contents="${INSTALL_PATH:?Missing install path}" +cmd_systemd_service_contents="${COMMAND_NAME:?Missing command name}" sync_cmd_systemd_service_contents="${SYNC_COMMAND_NAME:?Missing gsync cmd name}" +all_argumnets_systemd_service_contents="${ALL_ARGUMNETS:-}" +printf "%s\n" '# Systemd service file - start +[Unit] +Description=google-drive-upload synchronisation service +After=network.target + +[Service] +Type=simple +User='"$username_systemd_service_contents"' +Restart=on-abort +RestartSec=3 +ExecStart="'"$install_path_systemd_service_contents/$sync_cmd_systemd_service_contents"'" --foreground --command 
"'"$install_path_systemd_service_contents/$cmd_systemd_service_contents"'" --sync-detail-dir "/tmp/sync" '"$all_argumnets_systemd_service_contents"' + +# Security +PrivateTmp=true +ProtectSystem=full +NoNewPrivileges=true +ProtectControlGroups=true +ProtectKernelModules=true +ProtectKernelTunables=true +PrivateDevices=true +RestrictAddressFamilies=AF_INET AF_INET6 AF_NETLINK +RestrictNamespaces=true +RestrictRealtime=true +SystemCallArchitectures=native + +[Install] +WantedBy=multi-user.target +# Systemd service file - end' +} +_systemd_service_script(){ +name_systemd_service_script="${1:?Missing service name}" script_systemd_service_script="" +service_file_contents_systemd_service_script="${2:?Missing service file contents}" script_name_systemd_service_script="${3:?Missing script name}" +script_systemd_service_script='#!/usr/bin/env sh +set -e + +_usage() { + printf "%b" "# Service name: '"'$name_systemd_service_script'"' + +# Print the systemd service file contents +sh \"${0##*/}\" print\n +# Add service to systemd files ( this must be run before doing any of the below ) +sh \"${0##*/}\" add\n +# Start or Stop the service +sh \"${0##*/}\" start / stop\n +# Enable or Disable as a boot service: +sh \"${0##*/}\" enable / disable\n +# See logs +sh \"${0##*/}\" logs\n +# Remove the service from system +sh \"${0##*/}\" remove\n\n" + + _status + exit 0 +} + +_status() { + status_status="" current_status_status="" + status_status="$(systemctl status '"'$name_systemd_service_script'"' 2>&1 || :)" + current_status_status="$(printf "%s\n" "${status_status}" | env grep -E "●.*|(Loaded|Active|Main PID|Tasks|Memory|CPU): .*" || :)" + + printf "%s\n" "Current status of service: ${current_status_status:-${status_status}}" + return 0 +} + +unset TMPFILE + +[ $# = 0 ] && _usage + +CONTENTS='"'$service_file_contents_systemd_service_script'"' + +_add_service() { + service_file_path_add_service="/etc/systemd/system/'"$name_systemd_service_script"'.service" + printf "%s\n" "Service 
file path: ${service_file_path_add_service}" + if [ -f "${service_file_path_add_service}" ]; then + printf "%s\n" "Service file already exists. Overwriting" + sudo mv "${service_file_path_add_service}" "${service_file_path_add_service}.bak" || exit 1 + printf "%s\n" "Existing service file was backed up." + printf "%s\n" "Old service file: ${service_file_path_add_service}.bak" + else + [ -z "${TMPFILE}" ] && { + { { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || + TMPFILE="$(pwd)/.$(_t="$(date +"%s")" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 + } + export TMPFILE + trap "exit" INT TERM + _rm_tmpfile() { rm -f "${TMPFILE:?}" ; } + trap "_rm_tmpfile" EXIT + trap "" TSTP # ignore ctrl + z + + { printf "%s\n" "${CONTENTS}" >|"${TMPFILE}" && sudo cp "${TMPFILE}" /etc/systemd/system/'"$name_systemd_service_script"'.service; } || + { printf "%s\n" "Error: Failed to add service file to system." && exit 1 ;} + fi + sudo systemctl daemon-reload || printf "%s\n" "Could not reload the systemd daemon." + printf "%s\n" "Service file was successfully added." + return 0 +} + +_service() { + service_name_service='"'$name_systemd_service_script'"' action_service="${1:?}" service_file_path_service="" + service_file_path_service="/etc/systemd/system/${service_name_service}.service" + printf "%s\n" "Service file path: ${service_file_path_service}" + [ -f "${service_file_path_service}" ] || { printf "%s\n" "Service file does not exist." && exit 1; } + sudo systemctl daemon-reload || exit 1 + case "${action_service}" in + log*) sudo journalctl -u "${service_name_service}" -f ;; + rm | remove) + sudo systemctl stop "${service_name_service}" || : + if sudo rm -f /etc/systemd/system/"${service_name_service}".service; then + sudo systemctl daemon-reload || : + printf "%s\n" "Service removed." && return 0 + else + printf "%s\n" "Error: Cannot remove." 
&& exit 1 + fi + ;; + *) + success_service="${2:?}" error_service="${3:-}" + if sudo systemctl "${action_service}" "${service_name_service}"; then + printf "%s\n" "Success: ${service_name_service} ${success_service}." && return 0 + else + printf "%s\n" "Error: Cannot ${action_service} ${service_name_service} ${error_service}." && exit 1 + fi + ;; + esac + return 0 +} + +while [ "${#}" -gt 0 ]; do + case "${1}" in + print) printf "%s\n" "${CONTENTS}" ;; + add) _add_service ;; + start) _service start started ;; + stop) _service stop stopped ;; + enable) _service enable "boot service enabled" "boot service" ;; + disable) _service disable "boot service disabled" "boot service" ;; + logs) _service logs ;; + remove) _service rm ;; + *) printf "%s\n" "Error: No valid options provided." && _usage ;; + esac + shift +done' +printf "%s\n" "$script_systemd_service_script" >|"$script_name_systemd_service_script" +return 0 +} +_process_arguments(){ +unset status_process_arguments_process_arguments current_folder_process_arguments_process_arguments Aseen +while read -r INPUT <&4&&{ [ -n "$INPUT" ]||continue;}&&case "$Aseen" in +*"|:_//_:|$INPUT|:_//_:|"*)continue;; +*)Aseen="$Aseen|:_//_:|$INPUT|:_//_:|" +esac;do +! 
[ -d "$INPUT" ]&&printf "\nError: Invalid Input ( %s ), no such directory.\n" "$INPUT"&&continue +current_folder_process_arguments="$(pwd)" +FOLDER="$(cd "$INPUT"&&pwd)"||exit 1 +[ -n "$DEFAULT_ACCOUNT" ]&&_set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${DEFAULT_ACCOUNT}_ROOT_FOLDER_NAME" +GDRIVE_FOLDER="${GDRIVE_FOLDER:-${ROOT_FOLDER_NAME:-Unknown}}" +[ -n "$CREATE_SERVICE" ]&&{ +ALL_ARGUMNETS="\"$FOLDER\" ${TO_SLEEP:+-t \"$TO_SLEEP\"} -a \"$ARGS\"" +num_process_arguments="${num_process_arguments+$(printf "%s\n" $((num_process_arguments+1)))}" +service_name_process_arguments="gsync-$SERVICE_NAME${num_process_arguments:+_$num_process_arguments}" +script_name_process_arguments="$service_name_process_arguments.service.sh" +_systemd_service_script "$service_name_process_arguments" "$(_systemd_service_contents)" "$script_name_process_arguments" +_print_center "normal" "=" "=" +sh "$script_name_process_arguments" +_print_center "normal" "=" "=" +continue +} +cd "$FOLDER"||exit 1 +_check_existing_loop +case "$RETURN_STATUS" in +0|2)_start_new_loop;; +1)printf "%b\n" "Job is already running.." 
+if [ -n "$INFO" ];then +_get_job_info "$PID" more "PID: $PID|:_//_:|$FOLDER|:_//_:|$GDRIVE_FOLDER" +else +printf "%b\n" "Local Folder: $INPUT\nDrive Folder: $GDRIVE_FOLDER" +printf "%s\n" "PID: $PID" +fi +[ -n "$KILL" ]&&_kill_job "$PID"&&exit +[ -n "$SHOW_LOGS" ]&&tail -f "$LOGS" +;; +*) +esac +cd "$current_folder_process_arguments"||exit 1 +done 4<&1';}||{ command -v zsh 1>|/dev/null&&zsh -c 'printf "%s\n" "${COLUMNS}"';}||{ command -v stty 1>|/dev/null&&_tmp="$(stty size)"&&printf "%s\n" "${_tmp##* }";}||{ command -v tput 1>|/dev/null&&tput cols;})"||: +[ "$((COLUMNS))" -gt 45 ]&&return 0 +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)eval export "$2"=\"\$"$3"\";; +*)return 1 +esac +} +_url_encode()(\ +LC_ALL=C \ +LANG=C +awk 'BEGIN {while (y++ < 125) z[sprintf("%c", y)] = y + while (y = substr(ARGV[1], ++j, 1)) + q = y ~ /[[:alnum:]]_.!~*\47()-]/ ? q y : q sprintf("%%%02X", z[y]) + print q}' "$1") +_auto_update(){ +export COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE REPO LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL +command -v "$COMMAND_NAME" 1>/dev/null&&if [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+$TYPE_VALUE}}}}" ];then +current_time="$(_epoch)" +[ "$((LAST_UPDATE_TIME+AUTO_UPDATE_INTERVAL))" -lt "$(_epoch)" ]&&_update update +_update_value LAST_UPDATE_TIME "$current_time" +fi +return 0 +} +_update(){ +job_update="${1:-update}" +[ "${GLOBAL_INSTALL:-}" = true ]&&! [ "$(id -u)" = 0 ]&&printf "%s\n" "Error: Need root access to update."&&return 0 +[ "$job_update" = uninstall ]&&job_uninstall="--uninstall" +_print_center "justify" "Fetching $job_update script.." 
"-" +repo_update="${REPO:-labbots/google-drive-upload}" type_value_update="${TYPE_VALUE:-latest}" cmd_update="${COMMAND_NAME:-gupload}" path_update="${INSTALL_PATH:-$HOME/.gdrive-downloader/bin}" +{ [ "${TYPE:-}" != branch ]&&type_value_update="$(_get_latest_sha release "$type_value_update" "$repo_update")";}||: +if script_update="$(curl --compressed -Ls "https://github.com/$repo_update/raw/$type_value_update/install.sh")";then +_clear_line 1 +printf "%s\n" "$script_update"|sh -n||{ +printf "%s\n" "Install script downloaded but malformed, try again and if the issue persists open an issue on github." +return 1 +} +printf "%s\n" "$script_update"|sh -s -- ${job_uninstall:-} --skip-internet-check --cmd "$cmd_update" --path "$path_update" +current_time="$(date +'%s')" +[ -z "$job_uninstall" ]&&_update_value LAST_UPDATE_TIME "$current_time" +else +_clear_line 1 +"${QUIET:-_print_center}" "justify" "Error: Cannot download" " $job_update script." "=" 1>&2 +return 1 +fi +return 0 +} +_update_value(){ +command_path="${INSTALL_PATH:?}/${COMMAND_NAME:?}" +value_name="${1:?}" value="${2:-}" +script_without_value_and_shebang="$(grep -v "$value_name=\".*\".* # added values" -- "$command_path"|sed 1d)" +new_script="$(sed -n 1p -- "$command_path" +printf "%s\n" "$value_name=\"$value\" # added values" +printf "%s\n" "$script_without_value_and_shebang")" +printf "%s\n" "$new_script"|"${INSTALLATION:-bash}" -n||{ +printf "%s\n" "Update downloaded but malformed, try again and if the issue persists open an issue on github." +return 1 +} +chmod u+w -- "$command_path"&&printf "%s\n" "$new_script" >|"$command_path"&&chmod "a-w-r-x,${PERM_MODE:-u}+r+x" -- "$command_path" +return 0 +} +_is_fd_open(){ +for fd in ${1:?};do +if ! { true >&"$fd";} 2<>/dev/null;then +printf "%s\n" "Error: fd $fd not open." 
+return 1 +fi +done +} +_parser_add_help(){ +_PARSER_ALL_HELP="$_PARSER_ALL_HELP +${__PARSER_BAR:-} +${1:-}" 2>|/dev/null +} +_parser_check_arguments(){ +nargs_parser_check_arguments="$((${1:?_parser_check_arguments}))" +num_parser_check_arguments=$(($#-2)) +[ "$num_parser_check_arguments" -lt "$nargs_parser_check_arguments" ]&&{ +printf "%s\n" "${0##*/}: $2: flag requires $nargs_parser_check_arguments argument." +printf "\n%s\n" "Help:" +printf "%s\n" "$(_usage "$2")" +exit 1 +} +return 0 +} +_flag_exists(){ +tmp_flag_exists="" option_flag_exists="" +_flag_help "${1:?}" tmp_flag_exists option_flag_exists +[ -z "$tmp_flag_exists" ]&&return 1 +_set_value d "${2:?}" "$option_flag_exists" +} +_flag_help(){ +flag_flag_help="" +_trim "-" "${1:?_flag_help}" flag_flag_help +_set_value i "${2:?_flag_help}" "_parser__help_$flag_flag_help" +_set_value d "${3:-_}" "$flag_flag_help" +} +_parse_arguments(){ +__NEWLINE=" +" +_parse_support_ansi_escapes(){ +case "$TERM" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*){ [ -t 2 ]&&return 0;}||return 1;; +*): +esac +{ [ -t 2 ]&&return 0;}||return 1 +} +_parser_required_column_size(){ +COLUMNS="$({ command -v bash 1>|/dev/null&&bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1';}||{ command -v zsh 1>|/dev/null&&zsh -c 'printf "%s\n" "${COLUMNS}"';}||{ command -v stty 1>|/dev/null&&_tmp="$(stty size)"&&printf "%s\n" "${_tmp##* }";}||{ command -v tput 1>|/dev/null&&tput cols;})"||: +[ "$((COLUMNS))" -gt 45 ]&&return 0 +} +_parse_support_ansi_escapes&&_parser_required_column_size&&__PARSER_BAR="$(\ +filler='' \ +symbol='_' +i=1&&while [ "$i" -le "$COLUMNS" ];do +filler="$filler$symbol"&&i="$((i+1))" +done +printf "%s\n" "$filler")" +__PARSER_BAR="${__PARSER_BAR:+$__PARSER_BAR$__NEWLINE}" +unset _PARSER_ALL_HELP _PARSER_ARGS_SHIFT _PARSER_PREPROCESS_FUNCTION +unset _PARSER_FLAGS _PARSER_CURRENT_FLAGS _PARSER_CURRENT_NARGS _PARSER_CURRENT_ARGS _PARSER_CURRENT_ARGS_TYPE +"${1:?_parse_arguments - 1: Missing funtion name 
to setup flags}"||return 1 +shift 2>|/dev/null +_parser_run_preprocess||return 1 +while [ "$#" -gt 0 ];do +case "$1" in +''):;; +--)shift +while [ "$#" -gt 0 ];do +_parser_process_input "$@"||return 1 +shift +done +;; +-*)\ +flag_parse_arguments="" +if _flag_exists "$1" flag_parse_arguments;then +"_parser_process_$flag_parse_arguments" "$@"||return 1 +else +printf "%s\n\n" "${0##*/}: $1: Unknown option" +_short_help +fi +;; +*)_parser_process_input "$@"||return 1 +esac +_PARSER_ARGS_SHIFT="$((_PARSER_ARGS_SHIFT+1))" +shift "$_PARSER_ARGS_SHIFT" +_PARSER_ARGS_SHIFT="0" +done +return 0 +} +_parser_setup_flag(){ +_PARSER_CURRENT_FLAGS="" tmp_parser_setup_flag="" +_PARSER_FLAGS="${1:?_parser_setup_flag}" +for f in $_PARSER_FLAGS;do +_trim "-" "$f" tmp_parser_setup_flag +_PARSER_CURRENT_FLAGS="$_PARSER_CURRENT_FLAGS $tmp_parser_setup_flag" +done +_PARSER_CURRENT_NARGS="${2:?_parser_setup_flag}" +_PARSER_CURRENT_ARGS_TYPE="$3" +_PARSER_CURRENT_ARGS="$4" +} +_parser_setup_flag_help(){ +flags_parser_setup_flag_help="${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_help}" +nargs_parser_setup_flag_help="${_PARSER_CURRENT_NARGS:?_parser_setup_flag_help}" +unset start_parser_setup_flag_help \ +help_parser_setup_flag_help \ +arg_parser_setup_flag_help \ +all_parser_setup_flag_help +while IFS= read -r line <&4;do +help_parser_setup_flag_help="$help_parser_setup_flag_help + $line" +done 4<|/dev/null +} +_parser_setup_flag_preprocess(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_preprocess +while IFS= read -r line <&4;do +fn_parser_setup_flag_preprocess="$fn_parser_setup_flag_preprocess +$line" +done +_PARSER_PREPROCESS_FUNCTION="$_PARSER_PREPROCESS_FUNCTION +$fn_parser_setup_flag_preprocess" +} +_parser_setup_flag_process(){ +_is_fd_open 4||return 1 +unset fn_parser_setup_flag_process +if [ "${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process}" -gt 0 ]&&! 
[ "$_PARSER_CURRENT_ARGS_TYPE" = optional ];then +fn_parser_setup_flag_process="_parser_check_arguments ${_PARSER_CURRENT_NARGS:?_parser_setup_flag_process} \"\${@}\"" +fi +while IFS= read -r line <&4;do +fn_parser_setup_flag_process="$fn_parser_setup_flag_process +$line" +done +for f in ${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_process};do +eval "_parser_process_$f() { $fn_parser_setup_flag_process ; }" +done +} +_parser_run_preprocess(){ +eval "_parser_preprocess_setup() { ${_PARSER_PREPROCESS_FUNCTION:-:} ; }"&&_parser_preprocess_setup +} +_parser_shift(){ +export _PARSER_ARGS_SHIFT="${1:-1}" +} +_short_help(){ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +_set_value(){ +case "${1:?}" in +d|direct)export "${2:?}=$3";; +i|indirect)eval export "$2"=\"\$"$3"\";; +*)return 1 +esac +} +_trim(){ +char_trim="$1" str_trim="$2" var_trim="$3" +set -f +old_ifs="$IFS" +IFS="$char_trim" +set -- $str_trim +IFS= +if [ -n "$var_trim" ];then +_set_value d "$var_trim" "$*" +else +printf "%s" "$*" +fi +IFS="$old_ifs" +set +f +} +_parser_setup_flags(){ +_parser_add_help " +The script can be used to upload file/directory to google drive. + +Usage: +${0##*/} [options.. ] + +Foldername argument is optional. If not provided, the file will be uploaded to preconfigured google drive root folder. + +File name argument is optional if create directory option is used. + +Options:" +_parser_setup_flag "input" 0 +_parser_setup_flag_help \ +"Input files or drive ids to process." 
+_parser_setup_flag_preprocess 4<<'EOF' +unset TOTAL_ID_INPUTS TOTAL_FILE_INPUTS +EOF +_parser_setup_flag_process 4<<'EOF' +# set INPUT_FILE|ID_num to the input, where num is rank of input +case "${1}" in + *drive.google.com* | *docs.google.com*) _set_value d "INPUT_ID_$((TOTAL_ID_INPUTS += 1))" "$(_extract_id "${1}")" ;; + *) + [ -r "${1}" ] || { + { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid File - ${1} ]" "=" && printf "\n"; } 1>&2 + return + } + _set_value d "INPUT_FILE_$((TOTAL_FILE_INPUTS += 1))" "${1}" + ;; +esac +EOF +_parser_setup_flag "-a --account" 1 required "account name" +_parser_setup_flag_help \ +"Use a different account than the default one. + +To change the default account name, use this format, -a/--account default=account_name" +_parser_setup_flag_preprocess 4<<'EOF' +unset OAUTH_ENABLED ACCOUNT_NAME ACCOUNT_ONLY_RUN CUSTOM_ACCOUNT_NAME UPDATE_DEFAULT_ACCOUNT +EOF +_parser_setup_flag_process 4<<'EOF' +export OAUTH_ENABLED="true" CUSTOM_ACCOUNT_NAME="${2##default=}" +[ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ACCOUNT="_update_config" +_parser_shift +EOF +_parser_setup_flag "-la --list-accounts" 0 +_parser_setup_flag_help \ +"Print all configured accounts in the config files." +_parser_setup_flag_preprocess 4<<'EOF' +unset LIST_ACCOUNTS +EOF +_parser_setup_flag_process 4<<'EOF' +export LIST_ACCOUNTS="true" +EOF +_parser_setup_flag "-ca --create-account" 1 required "account name" +_parser_setup_flag_help \ +"To create a new account with the given name if does not already exists." +_parser_setup_flag_preprocess 4<<'EOF' +unset OAUTH_ENABLED NEW_ACCOUNT_NAME +EOF +_parser_setup_flag_process 4<<'EOF' +export OAUTH_ENABLED="true" +export NEW_ACCOUNT_NAME="${2}" && _parser_shift +EOF +_parser_setup_flag "-da --delete-account" 1 required "account name" +_parser_setup_flag_help \ +"To delete an account information from config file." 
+_parser_setup_flag_preprocess 4<<'EOF' +unset DELETE_ACCOUNT_NAME +EOF +_parser_setup_flag_process 4<<'EOF' +export DELETE_ACCOUNT_NAME="${2}" && _parser_shift +EOF +_parser_setup_flag "-c -C --create-dir" 1 required "foldername" +_parser_setup_flag_help \ +"Option to create directory. Will print folder id. Can be used to provide input folder, see README." +_parser_setup_flag_preprocess 4<<'EOF' +unset FOLDERNAME +EOF +_parser_setup_flag_process 4<<'EOF' +export FOLDERNAME="${2}" && _parser_shift +EOF +_parser_setup_flag "-r --root-dir" 1 required "google folder id or folder url containing id" +_parser_setup_flag_help \ +"Google folder ID/URL to which the file/directory is going to upload. +If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url" +_parser_setup_flag_preprocess 4<<'EOF' +unset ROOTDIR UPDATE_DEFAULT_ROOTDIR +EOF +_parser_setup_flag_process 4<<'EOF' +ROOTDIR="${2##default=}" +[ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ROOTDIR="_update_config" +_parser_shift +EOF +_parser_setup_flag "-s --skip-subdirs" 0 +_parser_setup_flag_help \ +"Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads." +_parser_setup_flag_preprocess 4<<'EOF' +unset SKIP_SUBDIRS +EOF +_parser_setup_flag_process 4<<'EOF' +export SKIP_SUBDIRS="true" +EOF +_parser_setup_flag "-p --parallel" 1 required "no of files to parallely upload" +_parser_setup_flag_help \ +"Upload multiple files in parallel, Max value = 10." 
+_parser_setup_flag_preprocess 4<<'EOF'
+unset NO_OF_PARALLEL_JOBS PARALLEL_UPLOAD
+EOF
+_parser_setup_flag_process 4<<'EOF'
+# bugfix: the help/error text promises a 1 to 10 range, enforce the upper
+# bound too ( only -gt 0 was checked before )
+# the redirects silence test errors for non numeric input
+if { [ "${2}" -gt 0 ] && [ "${2}" -le 10 ]; } 2>| /dev/null 1>&2; then
+    export NO_OF_PARALLEL_JOBS="${2}"
+else
+    printf "\nError: -p/--parallel accepts values between 1 to 10.\n"
+    return 1
+fi
+export PARALLEL_UPLOAD="parallel" && _parser_shift
+EOF
+_parser_setup_flag "-cl --clone" 1 required "gdrive id or link"
+_parser_setup_flag_help \
+"Upload a gdrive file without downloading."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset TOTAL_ID_INPUTS
+EOF
+_parser_setup_flag_process 4<<'EOF'
+# set INPUT_ID_num to the input, where num is rank of input
+# bugfix: the id/link is "${2}" - "${1}" is the flag name itself ( -cl or
+# --clone ), which can never match the gdrive url patterns
+case "${2}" in
+    *drive.google.com* | *docs.google.com*) _set_value d "INPUT_ID_$((TOTAL_ID_INPUTS += 1))" "$(_extract_id "${2}")" ;;
+esac
+_parser_shift
+EOF
+_parser_setup_flag "-o --overwrite" 0
+_parser_setup_flag_help \
+"Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset OVERWRITE UPLOAD_MODE
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export OVERWRITE="Overwrite" UPLOAD_MODE="update"
+EOF
+_parser_setup_flag "-d --skip-duplicates" 0
+_parser_setup_flag_help \
+"Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset SKIP_DUPLICATES UPLOAD_MODE
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update"
+EOF
+_parser_setup_flag "-cm --check-mode" 1 required "size or md5"
+_parser_setup_flag_help \
+"Additional flag for --overwrite and --skip-duplicates flag. Can be used to change check mode in those flags, available args are 'size' and 'md5'."
+_parser_setup_flag_preprocess 4<<'EOF' +unset CHECK_MODE +EOF +_parser_setup_flag_process 4<<'EOF' +case "${2}" in + size) export CHECK_MODE="2" && _parser_shift ;; + md5) export CHECK_MODE="3" && _parser_shift ;; + *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; +esac +EOF +_parser_setup_flag "-desc --description --description-all" 1 required "description of file" +_parser_setup_flag_help \ +"Specify description for the given file. To use the respective metadata of a file, below is the format: + +File name ( fullname ): %f | Size: %s | Mime Type: %m + +Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m' + +Note: For files inside folders, use --description-all flag." +_parser_setup_flag_preprocess 4<<'EOF' +unset DESCRIPTION DESCRIPTION_ALL +EOF +_parser_setup_flag_process 4<<'EOF' +[ "${1}" = "--description-all" ] && export DESCRIPTION_ALL="true" +export DESCRIPTION="${2}" && _parser_shift +EOF +_parser_setup_flag "-S --share" 1 required "email address" +_parser_setup_flag_help \ +"Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link." +_parser_setup_flag_preprocess 4<<'EOF' +unset SHARE EMAIL_REGEX SHARE_EMAIL +EOF +_parser_setup_flag_process 4<<'EOF' +SHARE="_share_id" +EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" +case "${2}" in + -* | '') : ;; + *) + if _assert_regex "${EMAIL_REGEX}" "${2}"; then + SHARE_EMAIL="${2}" && _parser_shift && export SHARE_EMAIL + fi + ;; +esac +SHARE_ROLE="${SHARE_ROLE:-reader}" +EOF +_parser_setup_flag "-SM -sm --share-mode" 1 required "share mode - r/w/c" +_parser_setup_flag_help \ +"Specify the share mode for sharing file. + + Share modes are: r / reader - Read only permission. + + : w / writer - Read and write permission. + + : c / commenter - Comment only permission. 
+
+Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is necessary."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset SHARE_ROLE SHARE
+EOF
+_parser_setup_flag_process 4<<'EOF'
+case "${2}" in
+    r | read*) SHARE_ROLE="reader" ;;
+    w | write*) SHARE_ROLE="writer" ;;
+    c | comment*) SHARE_ROLE="commenter" ;;
+    *)
+        printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." &&
+            exit 1
+        ;;
+esac
+SHARE="_share_id"
+_parser_shift
+EOF
+_parser_setup_flag "--speed" 1 required "speed"
+_parser_setup_flag_help \
+"Limit the download speed, supported formats: 1K, 1M and 1G."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset CURL_SPEED
+EOF
+_parser_setup_flag_process 4<<'EOF'
+# bugfix: [k,K] style classes contained a literal comma ( so "1," passed )
+# and the trailing + allowed repeated suffixes like "1KK", exactly one
+# k/m/g suffix is what curl --limit-rate expects
+_tmp_regex='^([0-9]+)([kK]|[mM]|[gG])$'
+if _assert_regex "${_tmp_regex}" "${2}"; then
+    export CURL_SPEED="--limit-rate ${2}" && _parser_shift
+else
+    printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2
+    exit 1
+fi
+EOF
+_parser_setup_flag "-i --save-info" 1 required "file where to save info"
+_parser_setup_flag_help \
+"Save uploaded files info to the given filename."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset LOG_FILE_ID
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export LOG_FILE_ID="${2}" && _parser_shift
+EOF
+_parser_setup_flag "-z --config" 1 required "config path"
+_parser_setup_flag_help \
+"Override default config file with custom config file.
+If you want to change default value, then use this format -z/--config default=your_config_file_path."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset UPDATE_DEFAULT_CONFIG
+# 1st arg - raw flag value ( may carry a default= prefix )
+# 2nd arg - config path with the default= prefix stripped
+_check_config() {
+    [ -z "${1##default=*}" ] && export UPDATE_DEFAULT_CONFIG="_update_config"
+    { [ -r "${2}" ] && CONFIG="${2}"; } || {
+        printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1
+    }
+    return 0
+}
+EOF
+_parser_setup_flag_process 4<<'EOF'
+# strip the optional default= prefix with posix expansion, ${2/default=/}
+# is a bashism and this script otherwise sticks to posix sh constructs
+_check_config "${2}" "${2##default=}"
+_parser_shift
+EOF
+_parser_setup_flag "-q --quiet" 0
+_parser_setup_flag_help \
+"Suppress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset QUIET
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export QUIET="_print_center_quiet"
+EOF
+_parser_setup_flag "-R --retry" 1 required "num of retries"
+_parser_setup_flag_help \
+"Retry the file upload if it fails, positive integer as argument. Currently only for file uploads."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset RETRY
+EOF
+_parser_setup_flag_process 4<<'EOF'
+# bugfix: test the flag value "${2}" - "$((2))" evaluated the literal 2, so
+# the check always passed and even non numeric arguments were accepted
+if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then
+    export RETRY="${2}" && _parser_shift
+else
+    printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n"
+    exit 1
+fi
+EOF
+_parser_setup_flag "-in --include" 1 required "pattern"
+_parser_setup_flag_help \
+"Only include the files with the given pattern to upload - Applicable for folder uploads.
+
+e.g: ${0##*/} local_folder --include '*1*', will only include with files with pattern '1' in the name."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset INCLUDE_FILES
+EOF
+_parser_setup_flag_process 4<<'EOF'
+INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && _parser_shift
+EOF
+_parser_setup_flag "-ex --exclude" 1 required "pattern"
+_parser_setup_flag_help \
+"Exclude the files with the given pattern from uploading. - Applicable for folder uploads.
+
+e.g: ${0##*/} local_folder --exclude '*1*', will exclude all the files pattern '1' in the name."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset EXCLUDE_FILES
+EOF
+_parser_setup_flag_process 4<<'EOF'
+# bugfix: find negation precedes the test ( ! -name pattern ), "-name !" would
+# make find treat ! as the pattern and error out on the stray '${2}'
+EXCLUDE_FILES="${EXCLUDE_FILES} ! -name '${2}' " && _parser_shift
+EOF
+_parser_setup_flag "--hide" 0
+_parser_setup_flag_help \
+"This flag will prevent the script to print sensitive information like root folder id and drivelink."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset HIDE_INFO
+EOF
+_parser_setup_flag_process 4<<'EOF'
+HIDE_INFO=":"
+EOF
+_parser_setup_flag "-v --verbose" 0
+_parser_setup_flag_help \
+"Display detailed message (only for non-parallel uploads)."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset VERBOSE
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export VERBOSE="true"
+EOF
+_parser_setup_flag "-V --verbose-progress" 0
+_parser_setup_flag_help \
+"Display detailed message and detailed upload progress(only for non-parallel uploads)."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset VERBOSE_PROGRESS
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export VERBOSE_PROGRESS="true"
+EOF
+_parser_setup_flag "--skip-internet-check" 0
+_parser_setup_flag_help \
+"Do not check for internet connection, recommended to use in sync jobs."
+_parser_setup_flag_preprocess 4<<'EOF'
+unset SKIP_INTERNET_CHECK
+EOF
+_parser_setup_flag_process 4<<'EOF'
+export SKIP_INTERNET_CHECK=":"
+EOF
+# bugfix: dropped the duplicate -V alias here, -V is already registered for
+# --verbose-progress above and a second registration silently overrides it
+_parser_setup_flag "--version --info" 0
+_parser_setup_flag_help \
+"Show detailed info, only if script is installed system wide."
+_parser_setup_flag_preprocess 4<<'EOF' +################################################### +# Print info if installed +################################################### +_version_info() { + export COMMAND_NAME REPO INSTALL_PATH TYPE TYPE_VALUE + if command -v "${COMMAND_NAME}" 1> /dev/null && [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then + for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do + value_version_info="" + _set_value i value_version_info "${i}" + printf "%s\n" "${i}=${value_version_info}" + done | sed -e "s/=/: /g" + else + printf "%s\n" "google-drive-upload is not installed system wide." + fi + exit 0 +} +EOF +_parser_setup_flag_process 4<<'EOF' +_version_info +EOF +_parser_setup_flag "-D --debug" 0 +_parser_setup_flag_help \ +"Display script command trace." +_parser_setup_flag_preprocess 4<<'EOF' +unset DEBUG +EOF +_parser_setup_flag_process 4<<'EOF' +export DEBUG="true" +EOF +_parser_setup_flag "-h --help" 1 optional "flag name" +_parser_setup_flag_help \ +"Print help for all flags and basic usage instructions. 
+ +To see help for a specific flag, --help flag_name ( with or without dashes ) + e.g: ${0##*/} --help aria" +_parser_setup_flag_preprocess 4<<'EOF' +################################################### +# 1st arg - can be flag name +# if 1st arg given, print specific flag help +# otherwise print full help +################################################### +_usage() { + [ -n "${1}" ] && { + help_usage_usage="" + _flag_help "${1}" help_usage_usage + + if [ -z "${help_usage_usage}" ]; then + printf "%s\n" "Error: No help found for ${1}" + else + printf "%s\n%s\n%s\n" "${__PARSER_BAR}" "${help_usage_usage}" "${__PARSER_BAR}" + fi + exit 0 + } + + printf "%s\n" "${_PARSER_ALL_HELP}" + exit 0 +} +EOF +_parser_setup_flag_process 4<<'EOF' +_usage "${2}" +EOF +[ "${GUPLOAD_INSTALLED_WITH:-}" = script ]&&{ +_parser_setup_flag "-u --update" 0 +_parser_setup_flag_help \ +"Update the installed script in your system." +_parser_setup_flag_process 4<<'EOF' +_check_debug && _update && { exit 0 || exit 1; } +EOF +_parser_setup_flag "--uninstall" 0 +_parser_setup_flag_help \ +"Uninstall script, remove related files." 
+_parser_setup_flag_process 4<<'EOF' +_check_debug && _update uninstall && { exit 0 || exit 1; } +EOF +} +return 0 +} +_account_name_valid(){ +name_account_name_valid="${1:?}" account_name_regex_account_name_valid='^([A-Za-z0-9_])+$' +_assert_regex "$account_name_regex_account_name_valid" "$name_account_name_valid"||return 1 +return 0 +} +_account_exists(){ +name_account_exists="${1:-}" client_id_account_exists="" client_secret_account_exists="" refresh_token_account_exists="" +_account_name_valid "$name_account_exists"||return 1 +_set_value indirect client_id_account_exists "ACCOUNT_${name_account_exists}_CLIENT_ID" +_set_value indirect client_secret_account_exists "ACCOUNT_${name_account_exists}_CLIENT_SECRET" +_set_value indirect refresh_token_account_exists "ACCOUNT_${name_account_exists}_REFRESH_TOKEN" +[ -z "${client_id_account_exists:+${client_secret_account_exists:+$refresh_token_account_exists}}" ]&&return 1 +return 0 +} +_all_accounts(){ +export CONFIG QUIET +{ _reload_config&&_handle_old_config;}||return 1 +COUNT=0 +while read -r account <&4&&[ -n "$account" ];do +_account_exists "$account"&&{ [ "$COUNT" = 0 ]&&"${QUIET:-_print_center}" "normal" " All available accounts. " "="||:;}&&printf "%b" "$((COUNT+=1)). $account \n"&&_set_value direct "ACC_${COUNT}_ACC" "$account" +done 4<|/dev/null +"${QUIET:-_print_center}" "normal" " New account name: " "=" +"${QUIET:-_print_center}" "normal" "Info: Account names can only contain alphabets / numbers / dashes." " "&&printf '\n' +} +until [ -n "$name_valid_set_new_account_name" ];do +if [ -n "$new_account_name_set_new_account_name" ];then +if _account_name_valid "$new_account_name_set_new_account_name";then +if _account_exists "$new_account_name_set_new_account_name";then +"${QUIET:-_print_center}" "normal" " Warning: Given account ( $new_account_name_set_new_account_name ) already exists, input different name. 
" "-" 1>&2 +unset new_account_name_set_new_account_name&&continue +else +export new_account_name_set_new_account_name="$new_account_name_set_new_account_name" NEW_ACCOUNT_NAME="$new_account_name_set_new_account_name"&&name_valid_set_new_account_name="true"&&continue +fi +else +"${QUIET:-_print_center}" "normal" " Warning: Given account name ( $new_account_name_set_new_account_name ) invalid, input different name. " "-" +unset new_account_name_set_new_account_name&&continue +fi +else +[ -t 1 ]||{ "${QUIET:-_print_center}" "normal" " Error: Not running in an interactive terminal, cannot ask for new account name. " 1>&2&&return 1;} +printf -- "-> \033[?7l" +read -r new_account_name_set_new_account_name +printf '\033[?7h' +fi +_clear_line 1 +done +"${QUIET:-_print_center}" "normal" " Given account name: $NEW_ACCOUNT_NAME " "=" +export ACCOUNT_NAME="$NEW_ACCOUNT_NAME" +return 0 +} +_delete_account(){ +export CONFIG QUIET +{ _reload_config&&_handle_old_config;}||return 1 +account_delete_account="${1:?Error: give account name}"&&unset regex_delete_account config_without_values_delete_account +if _account_exists "$account_delete_account";then +regex_delete_account="^ACCOUNT_${account_delete_account}_(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)|DEFAULT_ACCOUNT=\"$account_delete_account\"" +config_without_values_delete_account="$(grep -vE "$regex_delete_account" -- "$CONFIG")" +chmod u+w -- "$CONFIG"||return 1 +printf "%s\n" "$config_without_values_delete_account" >|"$CONFIG"||return 1 +chmod "a-w-r-x,u+r" -- "$CONFIG"||return 1 +"${QUIET:-_print_center}" "normal" " Successfully deleted account ( $account_delete_account ) from config. " "-" +else +"${QUIET:-_print_center}" "normal" " Error: Cannot delete account ( $account_delete_account ) from config. 
No such account exists " "-" 1>&2 +fi +return 0 +} +_handle_old_config(){ +export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ROOT_FOLDER ROOT_FOLDER_NAME +[ -n "${CLIENT_ID:+${CLIENT_SECRET:+$REFRESH_TOKEN}}" ]&&{ +account_name_handle_old_config="default" regex_check_handle_old_config config_without_values_handle_old_config count_handle_old_config +until ! _account_exists "$account_name_handle_old_config";do +account_name_handle_old_config="$account_name_handle_old_config$((count_handle_old_config+=1))" +done +regex_check_handle_old_config="^(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)" +config_without_values_handle_old_config="$(grep -vE "$regex_check_handle_old_config" -- "$CONFIG")" +chmod u+w -- "$CONFIG"||return 1 +printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ +"ACCOUNT_${account_name_handle_old_config}_CLIENT_ID=\"$CLIENT_ID\"" \ +"ACCOUNT_${account_name_handle_old_config}_CLIENT_SECRET=\"$CLIENT_SECRET\"" \ +"ACCOUNT_${account_name_handle_old_config}_REFRESH_TOKEN=\"$REFRESH_TOKEN\"" \ +"ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER=\"$ROOT_FOLDER\"" \ +"ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER_NAME=\"$ROOT_FOLDER_NAME\"" \ +"$config_without_values_handle_old_config" >|"$CONFIG"||return 1 +chmod "a-w-r-x,u+r" -- "$CONFIG"||return 1 +_reload_config||return 1 +} +return 0 +} +_check_credentials(){ +export CONFIG CONFIG_INFO DEFAULT_ACCOUNT NEW_ACCOUNT_NAME CUSTOM_ACCOUNT_NAME QUIET COUNT +{ _reload_config&&_handle_old_config;}||return 1 +ACCOUNT_NAME="$DEFAULT_ACCOUNT" +if [ -n "$NEW_ACCOUNT_NAME" ];then +_set_new_account_name "$NEW_ACCOUNT_NAME"||return 1 +_check_account_credentials "$ACCOUNT_NAME"||return 1 +else +if [ -n "$CUSTOM_ACCOUNT_NAME" ];then +if _account_exists "$CUSTOM_ACCOUNT_NAME";then +ACCOUNT_NAME="$CUSTOM_ACCOUNT_NAME" +else +"${QUIET:-_print_center}" "normal" " Error: No such account ( $CUSTOM_ACCOUNT_NAME ) exists. 
" "-"&&return 1
+fi
+elif [ -n "$DEFAULT_ACCOUNT" ];then
+_account_exists "$DEFAULT_ACCOUNT"||{
+_update_config DEFAULT_ACCOUNT "" "$CONFIG"&&unset DEFAULT_ACCOUNT ACCOUNT_NAME&&UPDATE_DEFAULT_ACCOUNT="_update_config"
+}
+else
+UPDATE_DEFAULT_ACCOUNT="_update_config"
+fi
+if [ -z "$ACCOUNT_NAME" ];then
+if _all_accounts 2>|/dev/null&&[ "$COUNT" -gt 0 ];then
+if [ "$COUNT" -eq 1 ];then
+_set_value indirect ACCOUNT_NAME "ACC_1_ACC"
+else
+"${QUIET:-_print_center}" "normal" " Above accounts are configured, but default one not set. " "="
+if [ -t 1 ];then
+"${QUIET:-_print_center}" "normal" " Choose default account: " "-"
+until [ -n "$ACCOUNT_NAME" ];do
+printf -- "-> \033[?7l"
+read -r account_name_check_credentials
+printf '\033[?7h'
+# the redirect silences "integer expression expected" for non numeric input
+if { [ "$account_name_check_credentials" -gt 0 ]&&[ "$account_name_check_credentials" -le "$COUNT" ];} 2>|/dev/null;then
+# bugfix: look up the number the user typed, ACC_${COUNT}_ACC always
+# resolved to the last listed account regardless of the choice
+_set_value indirect ACCOUNT_NAME "ACC_${account_name_check_credentials}_ACC"
+else
+_clear_line 1
+fi
+done
+else
+printf "%s\n" "Warning: Script is not running in a terminal, choosing first account as default."
+_set_value indirect ACCOUNT_NAME "ACC_1_ACC" +fi +fi +else +_set_new_account_name ""||return 1 +_check_account_credentials "$ACCOUNT_NAME"||return 1 +fi +fi +_check_account_credentials "$ACCOUNT_NAME"||return 1 +fi +"${UPDATE_DEFAULT_ACCOUNT:-:}" DEFAULT_ACCOUNT "$ACCOUNT_NAME" "$CONFIG" +"${UPDATE_DEFAULT_CONFIG:-:}" CONFIG "$CONFIG" "$CONFIG_INFO" +[ -n "$CONTINUE_WITH_NO_INPUT" ]||_token_bg_service +return 0 +} +_check_account_credentials(){ +account_name_check_account_credentials="${1:?Give account name}" +{ +_check_client ID "$account_name_check_account_credentials"&&_check_client SECRET "$account_name_check_account_credentials"&&_check_refresh_token "$account_name_check_account_credentials"&&_check_access_token "$account_name_check_account_credentials" check +}||return 1 +return 0 +} +_check_client(){ +export CONFIG QUIET +type_check_client="CLIENT_${1:?Error: ID or SECRET}" account_name_check_client="${2:-}" +unset type_value_check_client type_name_check_client valid_check_client client_check_client message_check_client regex_check_client +if [ "$type_check_client" = "CLIENT_ID" ];then +regex_check_client='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' +else +regex_check_client='[0-9A-Za-z_-]+' +fi +type_name_check_client="${account_name_check_client:+ACCOUNT_${account_name_check_client}_}$type_check_client" +_set_value indirect type_value_check_client "$type_name_check_client" +until [ -n "$type_value_check_client" ]&&[ -n "$valid_check_client" ];do +[ -n "$type_value_check_client" ]&&{ +if _assert_regex "$regex_check_client" "$type_value_check_client";then +[ -n "$client_check_client" ]&&{ _update_config "$type_name_check_client" "$type_value_check_client" "$CONFIG"||return 1;} +valid_check_client="true"&&continue +else +{ [ -n "$client_check_client" ]&&message_check_client="- Try again";}||message_check_client="in config ( $CONFIG )" +"${QUIET:-_print_center}" "normal" " Invalid Client $1 $message_check_client " "-"&&unset 
"$type_name_check_client" client +fi +} +[ -z "$client_check_client" ]&&printf "\n"&&"${QUIET:-_print_center}" "normal" " Enter Client $1 " "-" +[ -n "$client_check_client" ]&&_clear_line 1 +printf -- "-> " +read -r "${type_name_check_client?}"&&client_check_client=1 +_set_value indirect type_value_check_client "$type_name_check_client" +done +_set_value direct "$type_name_check_client" "$type_value_check_client" +_set_value direct "$type_check_client" "$type_value_check_client" +return 0 +} +_check_refresh_token(){ +export CLIENT_ID CLIENT_SECRET QUIET CONFIG CURL_PROGRESS SCOPE REDIRECT_URI TOKEN_URL +[ -z "${CLIENT_ID:+$CLIENT_SECRET}" ]&&return 1 +account_name_check_refresh_token="${1:-}" +refresh_token_regex='[0-9]//[0-9A-Za-z_-]+' authorization_code_regex='[0-9]/[0-9A-Za-z_-]+' +_set_value direct refresh_token_name_check_refresh_token "${account_name_check_refresh_token:+ACCOUNT_${account_name_check_refresh_token}_}REFRESH_TOKEN" +_set_value indirect refresh_token_value_check_refresh_token "${refresh_token_name_check_refresh_token:-}" +[ "${REFETCH_REFRESH_TOKEN:-false}" = "true" ]&&{ +unset refresh_token_value_check_refresh_token +} +[ -n "$refresh_token_value_check_refresh_token" ]&&{ +! _assert_regex "$refresh_token_regex" "$refresh_token_value_check_refresh_token"&&"${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. " "-"&&unset refresh_token_value_check_refresh_token +} +[ -z "$refresh_token_value_check_refresh_token" ]&&{ +printf "\n"&&"${QUIET:-_print_center}" "normal" "If you have a refresh token generated, then type the token, else leave blank and press return key.." " " +printf "\n"&&"${QUIET:-_print_center}" "normal" " Refresh Token " "-"&&printf -- "-> " +read -r refresh_token_value_check_refresh_token +if [ -n "$refresh_token_value_check_refresh_token" ];then +"${QUIET:-_print_center}" "normal" " Checking refresh token.. 
" "-" +if _assert_regex "$refresh_token_regex" "$refresh_token_value_check_refresh_token";then +_set_value direct REFRESH_TOKEN "$refresh_token_value_check_refresh_token" +{ _check_access_token "$account_name_check_refresh_token" skip_check&&_update_config "$refresh_token_name_check_refresh_token" "$refresh_token_value_check_refresh_token" "$CONFIG"&&_clear_line 1;}||check_error_check_refresh_token=true +else +check_error_check_refresh_token=true +fi +[ -n "$check_error_check_refresh_token" ]&&"${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token given, follow below steps to generate.. " "-"&&unset refresh_token_value_check_refresh_token +else +"${QUIET:-_print_center}" "normal" " No Refresh token given, follow below steps to generate.. " "-"&&unset refresh_token_value_check_refresh_token +fi +[ -z "$refresh_token_value_check_refresh_token" ]&&{ +printf "\n"&&"${QUIET:-_print_center}" "normal" "Visit the below URL, tap on allow and then enter the code obtained" " " +URL="https://accounts.google.com/o/oauth2/auth?client_id=$CLIENT_ID&redirect_uri=$REDIRECT_URI&scope=$SCOPE&response_type=code&prompt=consent" +printf "\n%s\n" "$URL" +unset AUTHORIZATION_CODE authorization_code AUTHORIZATION_CODE_VALID response +until [ -n "$AUTHORIZATION_CODE" ]&&[ -n "$AUTHORIZATION_CODE_VALID" ];do +[ -n "$AUTHORIZATION_CODE" ]&&{ +if _assert_regex "$authorization_code_regex" "$AUTHORIZATION_CODE";then +AUTHORIZATION_CODE_VALID="true"&&continue +else +"${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. 
" "-"&&unset AUTHORIZATION_CODE authorization_code +fi +} +{ [ -z "$authorization_code" ]&&printf "\n"&&"${QUIET:-_print_center}" "normal" " Enter the authorization code " "-";}||_clear_line 1 +printf -- "-> \033[?7l" +read -r AUTHORIZATION_CODE&&authorization_code=1 +printf '\033[?7h' +done +response_check_refresh_token="$(_curl --compressed "$CURL_PROGRESS" -X POST \ +--data "code=$AUTHORIZATION_CODE&client_id=$CLIENT_ID&client_secret=$CLIENT_SECRET&redirect_uri=$REDIRECT_URI&grant_type=authorization_code" "$TOKEN_URL")"||: +_clear_line 1 1>&2 +refresh_token_value_check_refresh_token="$(printf "%s\n" "$response_check_refresh_token"|_json_value refresh_token 1 1)"||{ printf "%s\n" "Error: Cannot fetch refresh token, make sure the authorization code was correct."&&return 1;} +_set_value direct REFRESH_TOKEN "$refresh_token_value_check_refresh_token" +{ _check_access_token "$account_name_check_refresh_token" skip_check "$response_check_refresh_token"&&_update_config "$refresh_token_name_check_refresh_token" "$refresh_token_value_check_refresh_token" "$CONFIG";}||return 1 +} +printf "\n" +} +_set_value direct "$refresh_token_name_check_refresh_token" "$refresh_token_value_check_refresh_token" +_set_value direct REFRESH_TOKEN "$refresh_token_value_check_refresh_token" +return 0 +} +_check_access_token(){ +export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN CONFIG QUIET +[ -z "${CLIENT_ID:+${CLIENT_SECRET:+$REFRESH_TOKEN}}" ]&&return 1 +account_name_check_access_token="${1:-}" no_check_check_access_token="${2:-false}" response_json_check_access_token="${3:-}" +unset token_name_check_access_token token_expiry_name_check_access_token token_value_check_access_token token_expiry_value_check_access_token response_check_access_token +access_token_regex='ya29\.[0-9A-Za-z_-]+' +token_name_check_access_token="${account_name_check_access_token:+ACCOUNT_${account_name_check_access_token}_}ACCESS_TOKEN" +token_expiry_name_check_access_token="${token_name_check_access_token}_EXPIRY" 
+_set_value indirect token_value_check_access_token "$token_name_check_access_token" +_set_value indirect token_expiry_value_check_access_token "$token_expiry_name_check_access_token" +[ "$no_check_check_access_token" = skip_check ]||[ -z "$token_value_check_access_token" ]||[ "${token_expiry_value_check_access_token:-0}" -lt "$(_epoch)" ]||! _assert_regex "$access_token_regex" "$token_value_check_access_token"&&{ +response_check_access_token="${response_json_check_access_token:-$(curl --compressed -s -X POST --data \ +"client_id=$CLIENT_ID&client_secret=$CLIENT_SECRET&refresh_token=$REFRESH_TOKEN&grant_type=refresh_token" "$TOKEN_URL")}"||: +if token_value_check_access_token="$(printf "%s\n" "$response_check_access_token"|_json_value access_token 1 1)";then +token_expiry_value_check_access_token="$(($(_epoch)+$(printf "%s\n" "$response_check_access_token"|_json_value expires_in 1 1)-1))" +_update_config "$token_name_check_access_token" "$token_value_check_access_token" "$CONFIG"||return 1 +_update_config "$token_expiry_name_check_access_token" "$token_expiry_value_check_access_token" "$CONFIG"||return 1 +else +"${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2 +printf "%s\n" "$response_check_access_token" 1>&2 +printf "%s\n" "If refresh token has expired, then use --oauth-refetch-refresh-token to refetch refresh token, if the error is not clear make a issue on github repository." 
+return 1 +fi +} +_set_value direct ACCESS_TOKEN "$token_value_check_access_token" +_set_value direct ACCESS_TOKEN_EXPIRY "$token_expiry_value_check_access_token" +_set_value direct INITIAL_ACCESS_TOKEN "$ACCESS_TOKEN" +return 0 +} +_reload_config(){ +export CONFIG +{ [ -r "$CONFIG" ]&&_parse_config "$CONFIG";}||{ printf "" >>"$CONFIG"||return 1;} +return 0 +} +_token_bg_service(){ +export MAIN_PID ACCESS_TOKEN ACCESS_TOKEN_EXPIRY TMPFILE +[ -z "$MAIN_PID" ]&&return 0 +printf "%b\n" "ACCESS_TOKEN=\"$ACCESS_TOKEN\"\nACCESS_TOKEN_EXPIRY=\"$ACCESS_TOKEN_EXPIRY\"" >|"${TMPFILE}_ACCESS_TOKEN" +{ +until ! kill -0 "$MAIN_PID" 2>|/dev/null 1>&2;do +. "${TMPFILE}_ACCESS_TOKEN" +CURRENT_TIME="$(_epoch)" +REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY-CURRENT_TIME))" +if [ "$REMAINING_TOKEN_TIME" -le 300 ];then +CONFIG="${TMPFILE}_ACCESS_TOKEN" _timeout 30 _check_access_token "" skip_check||: +else +TOKEN_PROCESS_TIME_TO_SLEEP="$(if [ "$REMAINING_TOKEN_TIME" -le 301 ];then +printf "0\n" +else +printf "%s\n" "$((REMAINING_TOKEN_TIME-300))" +fi)" +sleep "$TOKEN_PROCESS_TIME_TO_SLEEP" +fi +sleep 1 +done +}& +export ACCESS_TOKEN_SERVICE_PID="$!" 
+return 0 +} +_bytes_to_human(){ +b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0 +d_bytes_to_human='' type_bytes_to_human='' +while [ "$b_bytes_to_human" -gt 1024 ];do +d_bytes_to_human="$(printf ".%02d" $((b_bytes_to_human%1024*100/1024)))" +b_bytes_to_human=$((b_bytes_to_human/1024))&&s_bytes_to_human=$((s_bytes_to_human+=1)) +done +j=0&&for i in B KB MB GB TB PB EB YB ZB;do +j="$((j+=1))"&&[ "$((j-1))" = "$s_bytes_to_human" ]&&type_bytes_to_human="$i"&&break +continue +done +printf "%s\n" "$b_bytes_to_human$d_bytes_to_human $type_bytes_to_human" +} +_check_debug(){ +export DEBUG QUIET +if [ -n "$DEBUG" ];then +set -x&&PS4='-> ' +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";};} +_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +else +if [ -z "$QUIET" ];then +if _support_ansi_escapes;then +if ! _required_column_size;then +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +fi +export EXTRA_LOG="_print_center" CURL_PROGRESS="-#" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" +else +_print_center(){ { [ $# = 3 ]&&printf "%s\n" "[ $2 ]";}||{ printf "%s\n" "[ $2$3 ]";};} +_clear_line(){ :;}&&_move_cursor(){ :;} +fi +_newline(){ printf "%b" "$1";} +else +_print_center(){ :;}&&_clear_line(){ :;}&&_move_cursor(){ :;}&&_newline(){ :;} +fi +set +x +fi +} +_check_internet(){ +"${EXTRA_LOG:-}" "justify" "Checking Internet Connection.." "-" +if ! _timeout 10 curl -Is google.com --compressed;then +_clear_line 1 +"${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" +return 1 +fi +_clear_line 1 +} +_clear_line(){ +printf "\033[%sA\033[2K" "$1" +} +_dirname(){ +dir_dirname="${1:-.}" +dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}"&&[ -n "${dir_dirname##*/*}" ]&&dir_dirname=. 
+dir_dirname="${dir_dirname%/*}"&&dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" +printf '%s\n' "${dir_dirname:-/}" +} +_display_time(){ +t_display_time="$1" day_display_time="$((t_display_time/60/60/24))" +hr_display_time="$((t_display_time/60/60%24))" min_display_time="$((t_display_time/60%60))" sec_display_time="$((t_display_time%60))" +[ "$day_display_time" -gt 0 ]&&printf '%d days ' "$day_display_time" +[ "$hr_display_time" -gt 0 ]&&printf '%d hrs ' "$hr_display_time" +[ "$min_display_time" -gt 0 ]&&printf '%d minute(s) ' "$min_display_time" +[ "$day_display_time" -gt 0 ]||[ "$hr_display_time" -gt 0 ]||[ "$min_display_time" -gt 0 ]&&printf 'and ' +printf '%d seconds\n' "$sec_display_time" +} +_get_latest_sha(){ +export TYPE TYPE_VALUE REPO +unset latest_sha_get_latest_sha raw_get_latest_sha +case "${1:-$TYPE}" in +branch)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl --compressed -s https://github.com/"${3:-$REPO}"/commits/"${2:-$TYPE_VALUE}".atom -r 0-2000)" +_tmp="$(printf "%s\n" "$raw_get_latest_sha"|grep -o 'Commit\/.*<' -m1||:)"&&_tmp="${_tmp##*\/}"&&printf "%s\n" "${_tmp%%<*}")" +;; +release)\ +latest_sha_get_latest_sha="$(\ +raw_get_latest_sha="$(curl -L --compressed -s https://github.com/"${3:-$REPO}"/releases/"${2:-$TYPE_VALUE}")" +_tmp="$(printf "%s\n" "$raw_get_latest_sha"|grep '="/'"${3:-$REPO}""/commit" -m1||:)"&&_tmp="${_tmp##*commit\/}"&&printf "%s\n" "${_tmp%%\"*}")" +;; +*): +esac +printf "%b" "${latest_sha_get_latest_sha:+$latest_sha_get_latest_sha\n}" +} +_json_escape(){ +mode_json_escape="${1:?Missing mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" +if [ "$mode_json_escape" = "j" ];then +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s|\\\|\\\\\\\|g" \ +-e "s|\/|\\\/|g" \ +-e 's/\"/\\\"/g' \ +-e "s/$(printf '\t')/\\t/g" \ +-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +else +output_json_escape="$(printf "%s" "$input_json_escape"|sed \ +-e "s/$(printf '\t')/\\t/g" \ 
+-e "s/$(printf '\r')/\\r/g" \ +-e "s/$(printf '\f')/\\f/g")" +fi +output_json_escape="$(printf "%s" "$output_json_escape"|awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" +printf "%s" "$output_json_escape" +} +_json_value(){ +{ [ "$2" -gt 0 ] 2>|/dev/null&&no_of_lines_json_value="$2";}||: +{ [ "$3" -gt 0 ] 2>|/dev/null&&num_json_value="$3";}||{ ! [ "$3" = all ]&&num_json_value=1;} +_tmp="$(grep -o "\"$1\"\:.*" ${no_of_lines_json_value:+-m} $no_of_lines_json_value)"||return 1 +printf "%s\n" "$_tmp"|sed -e 's|.*"'"$1""\":||" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "$num_json_value"p||: +return 0 +} +_parse_config(){ +_config_file_parse_config="${1:?Error: Profile config file}" +print_parse_config="${2:-false}" +[ -r "$_config_file_parse_config" ]||{ +printf "%s\n" "Error: Given config file ( $_config_file_parse_config ) is not readable." +return 1 +} +while IFS='=' read -r key val;do +{ [ -n "$key" ]&&[ -n "$val" ]&&[ -n "${key##\#*}" ];}||continue +key="${key#"${key%%[![:space:]]*}"}" +val="${val#"${val%%[![:space:]]*}"}" +key="${key%"${key##*[![:space:]]}"}" +val="${val%"${val##*[![:space:]]}"}" +case "$val" in +\"*\")val="${val#\"}" val="${val%\"}";; +\'*\')val="${val#\'}" val="${val%\'}";; +*): +esac +export "$key=$val" 2>/dev/null||printf "%s\n" "Warning: $key is not a valid variable name." 
+[ "$print_parse_config" = true ]&&echo "$key=$val" +done <"$_config_file_parse_config" +return 0 +} +_print_center(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +term_cols_print_center="${COLUMNS:-}" +type_print_center="$1" filler_print_center="" +case "$type_print_center" in +normal)out_print_center="$2"&&symbol_print_center="$3";; +justify)if +[ $# = 3 ] +then +input1_print_center="$2" symbol_print_center="$3" to_print_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center-5))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&out_print_center="[ $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..]";}||{ out_print_center="[ $input1_print_center ]";} +else +input1_print_center="$2" input2_print_center="$3" symbol_print_center="$4" to_print_print_center="" temp_print_center="" out_print_center="" +to_print_print_center="$((term_cols_print_center*47/100))" +{ [ "${#input1_print_center}" -gt "$to_print_print_center" ]&&temp_print_center=" $(printf "%.${to_print_print_center}s\n" "$input1_print_center")..";}||{ temp_print_center=" $input1_print_center";} +to_print_print_center="$((term_cols_print_center*46/100))" +{ [ "${#input2_print_center}" -gt "$to_print_print_center" ]&&temp_print_center="$temp_print_center$(printf "%.${to_print_print_center}s\n" "$input2_print_center").. 
";}||{ temp_print_center="$temp_print_center$input2_print_center ";} +out_print_center="[$temp_print_center]" +fi +;; +*)return 1 +esac +str_len_print_center="${#out_print_center}" +[ "$str_len_print_center" -ge "$((term_cols_print_center-1))" ]&&{ +printf "%s\n" "$out_print_center"&&return 0 +} +filler_print_center_len="$(((term_cols_print_center-str_len_print_center)/2))" +i_print_center=1&&while [ "$i_print_center" -le "$filler_print_center_len" ];do +filler_print_center="$filler_print_center$symbol_print_center"&&i_print_center="$((i_print_center+1))" +done +printf "%s%s%s" "$filler_print_center" "$out_print_center" "$filler_print_center" +[ "$(((term_cols_print_center-str_len_print_center)%2))" -ne 0 ]&&printf "%s" "$symbol_print_center" +printf "\n" +return 0 +} +_print_center_quiet(){ +{ [ $# = 3 ]&&printf "%s\n" "$2";}||{ printf "%s%s\n" "$2" "$3";} +} +_support_ansi_escapes(){ +unset ansi_escapes +case "${TERM:-}" in +xterm*|rxvt*|urxvt*|linux*|vt*|screen*)ansi_escapes="true";; +*): +esac +{ [ -t 2 ]&&[ -n "$ansi_escapes" ]&&return 0;}||return 1 +} +_timeout(){ +timeout_timeout="${1:?Error: Specify Timeout}"&&shift +{ +"$@"& +child="$!" +trap -- "" TERM +{ +sleep "$timeout_timeout" +kill -9 "$child" +}& +wait "$child" +} 2>|/dev/null 1>&2 +} +_update_config(){ +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +value_name_update_config="$1" value_update_config="$2" config_path_update_config="$3" +! 
[ -f "$config_path_update_config" ]&&: >|"$config_path_update_config" +chmod u+w -- "$config_path_update_config"||return 1 +printf "%s\n%s\n" "$(grep -v -e "^$" -e "^$value_name_update_config=" -- "$config_path_update_config"||:)" \ +"$value_name_update_config=\"$value_update_config\"" >|"$config_path_update_config"||return 1 +chmod a-w-r-x,u+r -- "$config_path_update_config"||return 1 +return 0 +} +_check_existing_file()(export EXTRA_LOG CURL_PROGRESS_EXTRA API_URL API_VERSION +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +name_check_existing_file="$1" rootdir_check_existing_file="$2" mode_check_existing_file="$3" param_value_check_existing_file="$4" +unset query_check_existing_file response_check_existing_file id_check_existing_file +"$EXTRA_LOG" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2 +query_check_existing_file="$(_url_encode "name=\"$name_check_existing_file\" and '$rootdir_check_existing_file' in parents and trashed=false and 'me' in writers")" +response_check_existing_file="$(_api_request "$CURL_PROGRESS_EXTRA" \ +"$API_URL/drive/$API_VERSION/files?q=$query_check_existing_file&fields=files(id,name,mimeType${mode_check_existing_file:+,$mode_check_existing_file})&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +printf "%s\n" "$response_check_existing_file"|_json_value id 1 1 2>|/dev/null 1>&2||return 1 +[ -n "$mode_check_existing_file" ]&&{ +[ "$(printf "%s\n" "$response_check_existing_file"|_json_value "$mode_check_existing_file" 1 1)" = "$param_value_check_existing_file" ]||return 1 +} +printf "%s\n" "$response_check_existing_file" +return 0) +_clone_file(){ +export DESCRIPTION_FILE CHECK_MODE SKIP_DUPLICATES QUIET API_URL API_VERSION CURL_PROGRESS +[ $# -lt 5 ]&&printf "Missing arguments\n"&&return 1 +job_clone_file="$1" file_id_clone_file="$2" file_root_id_clone_file="$3" name_clone_file="$4" size_clone_file="$5" md5_clone_file="$6" +unset post_data_clone_file 
response_clone_file readable_size_clone_file description_clone_file&&STRING="Cloned" +readable_size_clone_file="$(_bytes_to_human "$size_clone_file")" +escaped_name_clone_file="$(_json_escape j "$name_clone_file")" print_name_clone_file="$(_json_escape p "$name_clone_file")" +[ -n "$DESCRIPTION_FILE" ]&&{ +description_clone_file="$(printf "%s\n" "$DESCRIPTION_FILE"|sed -e "s|%f|$name_clone_file|g|" -e "s|%f|$readable_size_clone_file|g|")" +description_clone_file="$(_json_escape j "$description_clone_file")" +} +post_data_clone_file="{\"parents\": [\"$file_root_id_clone_file\"]${description_clone_file:+,\"description\":\"$description_clone_file\"}}" +_print_center "justify" "$print_name_clone_file " "| $readable_size_clone_file" "=" +if [ "$job_clone_file" = update ];then +unset file_check_json_clone_file check_value_type_clone_file check_value_clone_file +case "$CHECK_MODE" in +2)check_value_type_clone_file="size" check_value_clone_file="$size_clone_file";; +3)check_value_type_clone_file="md5Checksum" check_value_clone_file="$md5_clone_file";; +*): +esac +if file_check_json_clone_file="$(_check_existing_file "$escaped_name_clone_file" "$file_root_id_clone_file" "$check_value_type_clone_file" "$check_value_clone_file")";then +if [ -n "$SKIP_DUPLICATES" ];then +_collect_file_info "$file_check_json_clone_file" "$print_name_clone_file"||return 1 +_clear_line 1 +"${QUIET:-_print_center}" "justify" "$print_name_clone_file" " already exists." "="&&return 0 +else +_print_center "justify" "Overwriting file.." 
"-" +{ _file_id_clone_file="$(printf "%s\n" "$file_check_json_clone_file"|_json_value id 1 1)"&&post_data_clone_file="$(_drive_info "$_file_id_clone_file" "parents,writersCanShare")";}||{ _error_logging_upload "$print_name_clone_file" "${post_data_clone_file:-$file_check_json_clone_file}"||return 1;} +if [ "$_file_id_clone_file" != "$file_id_clone_file" ];then +_api_request -s \ +-X DELETE \ +"$API_URL/drive/$API_VERSION/files/$_file_id_clone_file?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>|/dev/null 1>&2||: +STRING="Updated" +else +_collect_file_info "$file_check_json_clone_file" "$print_name_clone_file"||return 1 +fi +fi +else +_print_center "justify" "Cloning file.." "-" +fi +else +_print_center "justify" "Cloning file.." "-" +fi +response_clone_file="$(_api_request $CURL_PROGRESS \ +-X POST \ +-H "Content-Type: application/json; charset=UTF-8" \ +-d "$post_data_clone_file" \ +"$API_URL/drive/$API_VERSION/files/$file_id_clone_file/copy?supportsAllDrives=true&includeItemsFromAllDrives=true"||:)" +for _ in 1 2 3;do _clear_line 1;done +_collect_file_info "$response_clone_file" "$print_name_clone_file"||return 1 +"${QUIET:-_print_center}" "justify" "$print_name_clone_file " "| $readable_size_clone_file | $STRING" "=" +return 0 +} +_create_directory(){ +export EXTRA_LOG CURL_PROGRESS_EXTRA API_VERSION API_URL +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +dirname_create_directory="${1##*/}" rootdir_create_directory="$2" +unset query_create_directory search_response_create_directory folder_id_create_directory +escaped_dirname_create_directory="$(_json_escape j "$dirname_create_directory")" +print_dirname_create_directory="$(_json_escape p "$dirname_create_directory")" +"$EXTRA_LOG" "justify" "Creating GDRIVE DIR:" " $print_dirname_create_directory" "-" 1>&2 +query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"$escaped_dirname_create_directory\" and trashed=false and '$rootdir_create_directory' in 
parents")" +search_response_create_directory="$(_api_request "$CURL_PROGRESS_EXTRA" \ +"$API_URL/drive/$API_VERSION/files?q=$query_create_directory&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +if ! folder_id_create_directory="$(printf "%s\n" "$search_response_create_directory"|_json_value id 1 1)";then +unset create_folder_post_data_create_directory create_folder_response_create_directory +create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"$escaped_dirname_create_directory\",\"parents\": [\"$rootdir_create_directory\"]}" +create_folder_response_create_directory="$(_api_request "$CURL_PROGRESS_EXTRA" \ +-X POST \ +-H "Content-Type: application/json; charset=UTF-8" \ +-d "$create_folder_post_data_create_directory" \ +"$API_URL/drive/$API_VERSION/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +fi +_clear_line 1 1>&2 +{ folder_id_create_directory="${folder_id_create_directory:-$(printf "%s\n" "$create_folder_response_create_directory"|_json_value id 1 1)}"&&printf "%s\n" "$folder_id_create_directory";}||{ printf "%s\n" "$create_folder_response_create_directory" 1>&2&&return 1;} +return 0 +} +_drive_info(){ +export EXTRA_LOG CURL_PROGRESS_EXTRA API_URL API_VERSION +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +folder_id_drive_info="$1" fetch_drive_info="$2" +unset search_response_drive_info +"$EXTRA_LOG" "justify" "Fetching info.." 
"-" 1>&2 +search_response_drive_info="$(_api_request "$CURL_PROGRESS_EXTRA" \ +"$API_URL/drive/$API_VERSION/files/$folder_id_drive_info?fields=$fetch_drive_info&supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +printf "%b" "${search_response_drive_info:+$search_response_drive_info\n}" +return 0 +} +_extract_id(){ +[ $# = 0 ]&&printf "Missing arguments\n"&&return 1 +LC_ALL=C id_extract_id="$1" +case "$id_extract_id" in +*'drive.google.com'*'id='*)_tmp="${id_extract_id##*id=}"&&_tmp="${_tmp%%\?*}"&&id_extract_id="${_tmp%%\&*}";; +*'drive.google.com'*'file/d/'*|'http'*'docs.google.com'*'/d/'*)_tmp="${id_extract_id##*\/d\/}"&&_tmp="${_tmp%%\/*}"&&_tmp="${_tmp%%\?*}"&&id_extract_id="${_tmp%%\&*}";; +*'drive.google.com'*'drive'*'folders'*)_tmp="${id_extract_id##*\/folders\/}"&&_tmp="${_tmp%%\?*}"&&id_extract_id="${_tmp%%\&*}";; +*): +esac +printf "%b" "${id_extract_id:+$id_extract_id\n}" +} +_upload_file(){ +export QUIET DESCRIPTION_FILE CHECK_MODE SKIP_DUPLICATES API_URL API_VERSION INFO_PATH +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +job_upload_file="$1" input_upload_file="$2" folder_id_upload_file="$3" +unset slug_upload_file inputname_upload_file extension_upload_file inputsize_upload_file readable_size_upload_file request_method_upload_file \ +url_upload_file postdata_upload_file uploadlink_upload_file upload_body_upload_file mime_type_upload_file description_upload_file \ +resume_args1_upload_file resume_args2_upload_file resume_args3_upload_file +slug_upload_file="${input_upload_file##*/}" +escaped_slug_upload_file="$(_json_escape j "$slug_upload_file")" print_slug_upload_file="$(_json_escape p "$slug_upload_file")" +inputname_upload_file="${slug_upload_file%.*}" +extension_upload_file="${slug_upload_file##*.}" +inputsize_upload_file="$(($(wc -c <"$input_upload_file")))"&&content_length_upload_file="$inputsize_upload_file" +readable_size_upload_file="$(_bytes_to_human "$inputsize_upload_file")" +[ 
"$inputname_upload_file" = "$extension_upload_file" ]&&{ +mime_type_upload_file="$(file --brief --mime-type "$input_upload_file"||mimetype --output-format %m "$input_upload_file")" 2>|/dev/null||{ +"${QUIET:-_print_center}" "justify" "Error: file or mimetype command not found." "="&&printf "\n" +exit 1 +} +} +[ -n "$DESCRIPTION_FILE" ]&&{ +description_upload_file="$(printf "%s\n" "$DESCRIPTION_FILE"|sed -e "s|%f|$slug_upload_file|g" -e "s|%f|$readable_size_upload_file|g" -e "s|%m|$mime_type_upload_file|g")" +description_upload_file="$(_json_escape j "$description_upload_file")" +} +_print_center "justify" "$print_slug_upload_file" " | $readable_size_upload_file" "=" +[ "$job_upload_file" = update ]&&{ +unset file_check_json_upload_file check_value_upload_file +case "$CHECK_MODE" in +2)check_value_type_upload_file="size" check_value_upload_file="$inputsize_upload_file";; +3)\ +check_value_type_upload_file="md5Checksum" +check_value_upload_file="$(md5sum "$input_upload_file")"||{ +"${QUIET:-_print_center}" "justify" "Error: cannot calculate md5sum of given file." 
"=" 1>&2 +return 1 +} +check_value_upload_file="${check_value_upload_file%% *}" +;; +*): +esac +if file_check_json_upload_file="$(_check_existing_file "$escaped_slug_upload_file" "$folder_id_upload_file" "$check_value_type_upload_file" "$check_value_upload_file")";then +if [ -n "$SKIP_DUPLICATES" ];then +_collect_file_info "$file_check_json_upload_file" "$print_slug_upload_file"||return 1 +STRING="Skipped" _normal_logging_upload +return 0 +else +request_method_upload_file="PATCH" +_file_id_upload_file="$(printf "%s\n" "$file_check_json_upload_file"|_json_value id 1 1)"||{ _error_logging_upload "$print_slug_upload_file" "$file_check_json_upload_file"||return 1;} +url_upload_file="$API_URL/upload/drive/$API_VERSION/files/$_file_id_upload_file?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" +postdata_upload_file="{\"mimeType\": \"$mime_type_upload_file\",\"name\": \"$escaped_slug_upload_file\",\"addParents\": [\"$folder_id_upload_file\"]${description_upload_file:+,\"description\":\"$description_upload_file\"}}" +STRING="Updated" +fi +else +job_upload_file="create" +fi +} +[ "$job_upload_file" = create ]&&{ +url_upload_file="$API_URL/upload/drive/$API_VERSION/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" +request_method_upload_file="POST" +postdata_upload_file="{\"mimeType\": \"$mime_type_upload_file\",\"name\": \"$escaped_slug_upload_file\",\"parents\": [\"$folder_id_upload_file\"]${description_upload_file:+,\"description\":\"$description_upload_file\"}}" +STRING="Uploaded" +} +__file_upload_file="$INFO_PATH/${print_slug_upload_file}__::__${folder_id_upload_file}__::__$inputsize_upload_file" +if [ -r "$__file_upload_file" ];then +uploadlink_upload_file="$(cat "$__file_upload_file"||:)" +http_code_upload_file="$(curl --compressed -s -X PUT "$uploadlink_upload_file" -o /dev/null --write-out %"{http_code}")"||: +case "$http_code_upload_file" in +308)\ +uploaded_range_upload_file="$(\ +raw_upload_file="$(curl 
--compressed -s -X PUT \ +-H "Content-Range: bytes */$content_length_upload_file" \ +--url "$uploadlink_upload_file" --globoff -D -||:)"&&printf "%s\n" "${raw_upload_file##*[R,r]ange: bytes=0-}"|while +read -r line +do printf "%s\n" "${line%%"$(printf '\r')"}"&&break;done)" +if [ "$uploaded_range_upload_file" -gt 0 ] 2>|/dev/null;then +_print_center "justify" "Resuming interrupted upload.." "-"&&_newline "\n" +content_range_upload_file="$(printf "bytes %s-%s/%s\n" "$((uploaded_range_upload_file+1))" "$((inputsize_upload_file-1))" "$inputsize_upload_file")" +content_length_upload_file="$((inputsize_upload_file-$((uploaded_range_upload_file+1))))" +resume_args1_upload_file='-s' resume_args2_upload_file='--http1.1' resume_args3_upload_file="Content-Range: $content_range_upload_file" +_upload_file_from_uri _clear_line +_collect_file_info "$upload_body_upload_file" "$print_slug_upload_file"||return 1 +_normal_logging_upload +_remove_upload_session +else +_full_upload||return 1 +fi +;; +4[0-9][0-9]|000)_full_upload||return 1 +;; +201|200)\ +upload_body_upload_file="$http_code_upload_file" +_collect_file_info "$upload_body_upload_file" "$print_slug_upload_file"||return 1 +_normal_logging_upload +_remove_upload_session +;; +*): +esac +else +_full_upload||return 1 +fi +return 0 +} +_generate_upload_link(){ +"${EXTRA_LOG:-}" "justify" "Generating upload link.." 
"-" 1>&2 +uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA:-}" \ +-X "$request_method_upload_file" \ +-H "Content-Type: application/json; charset=UTF-8" \ +-H "X-Upload-Content-Type: $mime_type_upload_file" \ +-H "X-Upload-Content-Length: $inputsize_upload_file" \ +-d "$postdata_upload_file" \ +"$url_upload_file" \ +-D -||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +case "$uploadlink_upload_file" in +*'ocation: '*'upload_id'*)uploadlink_upload_file="$(printf "%s\n" "${uploadlink_upload_file##*[L,l]ocation: }"|while read -r line;do printf "%s\n" "${line%%"$(printf '\r')"}"&&break;done)"&&return 0;; +*)return 1 +esac +return 0 +} +_upload_file_from_uri(){ +_print_center "justify" "Uploading.." "-" +upload_body_upload_file="$(_api_request ${CURL_PROGRESS:-} \ +-X PUT \ +-H "Content-Type: $mime_type_upload_file" \ +-H "Content-Length: $content_length_upload_file" \ +-H "Slug: $print_slug_upload_file" \ +-T "$input_upload_file" \ +-o- \ +--url "$uploadlink_upload_file" \ +--globoff \ +${CURL_SPEED:-} ${resume_args1_upload_file:-} ${resume_args2_upload_file:-} \ +-H "$resume_args3_upload_file"||:)" +[ -z "${VERBOSE_PROGRESS:-}" ]&&for _ in 1 2;do _clear_line 1;done&&"${1:-:}" +return 0 +} +_normal_logging_upload(){ +[ -z "${VERBOSE_PROGRESS:-}" ]&&_clear_line 1 +"${QUIET:-_print_center}" "justify" "$slug_upload_file " "| $readable_size_upload_file | ${STRING:-}" "=" +return 0 +} +_log_upload_session(){ +[ "$inputsize_upload_file" -gt 1000000 ]&&printf "%s\n" "$uploadlink_upload_file" >|"$__file_upload_file" +return 0 +} +_remove_upload_session(){ +rm -f "$__file_upload_file" +return 0 +} +_full_upload(){ +_generate_upload_link||{ _error_logging_upload "$print_slug_upload_file" "$uploadlink_upload_file"||return 1;} +_log_upload_session +_upload_file_from_uri +_collect_file_info "$upload_body_upload_file" "$print_slug_upload_file"||return 1 +_normal_logging_upload +_remove_upload_session +return 0 +} +_share_id(){ +[ $# -lt 2 ]&&printf "Missing 
arguments\n"&&return 1 +id_share_id="$1" role_share_id="${2:?Missing role}" share_email_share_id="$3" role_share_id="reader" type_share_id="${share_email_share_id:+user}" +unset post_data_share_id response_share_id +"$EXTRA_LOG" "justify" "Sharing.." "-" 1>&2 +post_data_share_id="{\"role\":\"$role_share_id\",\"type\":\"${type_share_id:-anyone}\"${share_email_share_id:+,\"emailAddress\":\"$share_email_share_id\"}}" +response_share_id="$(_api_request "$CURL_PROGRESS_EXTRA" \ +-X POST \ +-H "Content-Type: application/json; charset=UTF-8" \ +-d "$post_data_share_id" \ +"$API_URL/drive/$API_VERSION/files/$id_share_id/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true"||:)"&&_clear_line 1 1>&2 +_clear_line 1 1>&2 +{ printf "%s\n" "$response_share_id"|_json_value id 1 1 2>|/dev/null 1>&2&&return 0;}||{ printf "%s\n" "Error: Cannot Share." 1>&2&&printf "%s\n" "$response_share_id" 1>&2&&return 1;} +} +_api_request(){ +. "${TMPFILE:-}_ACCESS_TOKEN" +curl --compressed \ +-H "Authorization: Bearer ${ACCESS_TOKEN:-}" \ +"$@" +} +_collect_file_info(){ +json_collect_file_info="$1" info_collect_file_info="" +FILE_ID="$(printf "%s\n" "$json_collect_file_info"|_json_value id 1 1)"||{ _error_logging_upload "$2" "$json_collect_file_info"||return 1;} +{ [ -z "$LOG_FILE_ID" ]||[ -d "$LOG_FILE_ID" ];}&&return 0 +info_collect_file_info="Link: https://drive.google.com/open?id=$FILE_ID +Name: $(printf "%s\n" "$json_collect_file_info"|_json_value name 1 1||:) +ID: $FILE_ID +Type: $(printf "%s\n" "$json_collect_file_info"|_json_value mimeType 1 1||:)" +printf "%s\n\n" "$info_collect_file_info" >>"$LOG_FILE_ID" +return 0 +} +_error_logging_upload(){ +log_error_logging_upload="$2" +"${QUIET:-_print_center}" "justify" "Upload ERROR" ", ${1:-} not ${STRING:-uploaded}." "=" 1>&2 +case "$log_error_logging_upload" in +*'"message": "User rate limit exceeded."'*)printf "%s\n\n%s\n" "$log_error_logging_upload" \ +"Today's upload limit reached for this account. 
Use another account to upload or wait for tomorrow." \ +1>&2 +export RETRY=0 +;; +''|*)printf "%s\n" "$log_error_logging_upload" 1>&2 +esac +printf "\n\n\n" 1>&2 +return 1 +} +_get_rootdir_id(){ +file_gen_final_list="${1:?Error: give filename}" +rootdir_gen_final_list="$(_dirname "$file_gen_final_list")" +temp_gen_final_list="$(printf "%s\n" "${DIRIDS:?Error: DIRIDS Missing}"|grep -F "|:_//_:|$rootdir_gen_final_list|:_//_:|"||:)" +printf "%s\n" "${temp_gen_final_list%%"|:_//_:|$rootdir_gen_final_list|:_//_:|"}" +return 0 +} +_upload_file_main(){ +[ $# -lt 2 ]&&printf "Missing arguments\n"&&return 1 +file_upload_file_main="$2" sleep_upload_file_main=0 +{ [ "$1" = parse ]&&dirid_upload_file_main="$(_get_rootdir_id "$file_upload_file_main")";}||dirid_upload_file_main="$3" +retry_upload_file_main="${RETRY:-0}"&&unset RETURN_STATUS +until [ "$retry_upload_file_main" -le 0 ]&&[ -n "$RETURN_STATUS" ];do +if [ -n "$4" ];then +{ _upload_file "${UPLOAD_MODE:-create}" "$file_upload_file_main" "$dirid_upload_file_main" 2>|/dev/null 1>&2&&RETURN_STATUS=1&&break;}||RETURN_STATUS=2 +else +{ _upload_file "${UPLOAD_MODE:-create}" "$file_upload_file_main" "$dirid_upload_file_main"&&RETURN_STATUS=1&&break;}||RETURN_STATUS=2 +fi +[ "$((retry_upload_file_main-=1))" -lt 1 ]&&sleep "$((sleep_upload_file_main+=1))" +continue +done +[ -n "$4" ]&&{ +{ [ "$RETURN_STATUS" = 1 ]&&printf "%s\n" "$file_upload_file_main";}||printf "%s\n" "$file_upload_file_main" 1>&2 +} +return 0 +} +_upload_folder(){ +export VERBOSE VERBOSE_PROGRESS NO_OF_PARALLEL_JOBS TMPFILE NO_OF_FILES +[ $# -lt 3 ]&&printf "Missing arguments\n"&&return 1 +mode_upload_folder="$1" PARSE_MODE="$2" files_upload_folder="$3" ID="${4:-}" +SUCCESS_STATUS=0 SUCCESS_FILES="" ERROR_STATUS=0 ERROR_FILES="" +case "$mode_upload_folder" in +normal)[ "$PARSE_MODE" = parse ]&&_clear_line 1&&_newline "\n" +while read -r file <&4;do +_upload_file_main "$PARSE_MODE" "$file" "$ID" +{ [ "$RETURN_STATUS" = 1 ]&&: 
"$((SUCCESS_STATUS+=1))"&&SUCCESS_FILES="$(printf "%b\n" "${SUCCESS_STATUS:+$SUCCESS_STATUS\n}$file")";}||{ : "$((ERROR_STATUS+=1))"&&ERROR_FILES="$(printf "%b\n" "${ERROR_STATUS:+$ERROR_STATUS\n}$file")";} +if [ -n "${VERBOSE:-$VERBOSE_PROGRESS}" ];then +_print_center "justify" "Status: $SUCCESS_STATUS Uploaded" " | $ERROR_STATUS Failed" "="&&_newline "\n" +else +for _ in 1 2;do _clear_line 1;done +_print_center "justify" "Status: $SUCCESS_STATUS Uploaded" " | $ERROR_STATUS Failed" "=" +fi +done 4<|"$TMPFILE"SUCCESS 2>|"$TMPFILE"ERROR)& +pid="$!" +until [ -f "$TMPFILE"SUCCESS ]||[ -f "$TMPFILE"ERORR ];do sleep 0.5;done +[ "$PARSE_MODE" = parse ]&&_clear_line 1 +_newline "\n" +until ! kill -0 "$pid" 2>|/dev/null 1>&2;do +SUCCESS_STATUS="$(($(wc -l <"$TMPFILE"SUCCESS)))" +ERROR_STATUS="$(($(wc -l <"$TMPFILE"ERROR)))" +sleep 1 +[ "$((SUCCESS_STATUS+ERROR_STATUS))" != "$TOTAL" ]&&_clear_line 1&&"${QUIET:-_print_center}" "justify" "Status" ": $SUCCESS_STATUS Uploaded | $ERROR_STATUS Failed" "=" +TOTAL="$((SUCCESS_STATUS+ERROR_STATUS))" +done +SUCCESS_STATUS="$(($(wc -l <"$TMPFILE"SUCCESS)))" SUCCESS_FILES="$(cat "$TMPFILE"SUCCESS)" +ERROR_STATUS="$(($(wc -l <"$TMPFILE"ERROR)))" ERROR_FILES="$(cat "$TMPFILE"ERROR)" +export SUCCESS_FILES ERROR_FILES +;; +*): +esac +return 0 +} +_cleanup_config(){ +config="${1:?Error: Missing config}"&&unset values_regex _tmp +! 
[ -f "$config" ]&&return 0 +while read -r line <&4&&[ -n "$line" ];do +expiry_value_name="${line%%=*}" +token_value_name="${expiry_value_name%%_EXPIRY}" +_tmp="${line##*=}"&&_tmp="${_tmp%\"}"&&expiry="${_tmp#\"}" +[ "$expiry" -le "$(_epoch)" ]&&values_regex="${values_regex:+$values_regex|}$expiry_value_name=\".*\"|$token_value_name=\".*\"" +done 4<|"$config"&&chmod "a-w-r-x,u+r" -- "$config" +return 0 +} +_setup_arguments(){ +[ $# = 0 ]&&printf "Missing arguments\n"&&return 1 +unset CONTINUE_WITH_NO_INPUT +export CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s" +INFO_PATH="$HOME/.google-drive-upload" CONFIG_INFO="$INFO_PATH/google-drive-upload.configpath" +[ -f "$CONFIG_INFO" ]&&. "$CONFIG_INFO" +CONFIG="${CONFIG:-$HOME/.googledrive.conf}" +unset ROOT_FOLDER CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ACCESS_TOKEN +export API_URL="https://www.googleapis.com" +export API_VERSION="v3" \ +SCOPE="$API_URL/auth/drive" \ +REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ +TOKEN_URL="https://accounts.google.com/o/oauth2/token" +_parse_arguments "_parser_setup_flags" "$@"||return 1 +_check_debug +[ -n "$VERBOSE_PROGRESS" ]&&unset VERBOSE&&export CURL_PROGRESS="" +[ -n "$QUIET" ]&&export CURL_PROGRESS="-s" +mkdir -p "$INFO_PATH"||return 1 +[ -n "$DELETE_ACCOUNT_NAME" ]&&_delete_account "$DELETE_ACCOUNT_NAME" +[ -n "$LIST_ACCOUNTS" ]&&_all_accounts +[ -z "${INPUT_FILE_1:-${INPUT_ID_1:-$FOLDERNAME}}" ]&&{ +[ -z "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-$NEW_ACCOUNT_NAME}}" ]&&_short_help +[ -n "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-}}" ]&&exit 0 +[ -n "$NEW_ACCOUNT_NAME" ]&&CONTINUE_WITH_NO_INPUT="true" +} +[ -z "$CHECK_MODE" ]&&{ +case "${SKIP_DUPLICATES:-$OVERWRITE}" in +"Overwrite")export CHECK_MODE="1";; +"Skip Existing")export CHECK_MODE="2";; +*): +esac +} +return 0 +} +_setup_traps(){ +export SUPPORT_ANSI_ESCAPES TMPFILE ACCESS_TOKEN ACCESS_TOKEN_EXPIRY INITIAL_ACCESS_TOKEN ACCOUNT_NAME CONFIG ACCESS_TOKEN_SERVICE_PID +_cleanup(){ +[ -n "$SUPPORT_ANSI_ESCAPES" ]&&printf 
"\033[?25h\033[?7h" +{ +[ -f "${TMPFILE}_ACCESS_TOKEN" ]&&{ +. "${TMPFILE}_ACCESS_TOKEN" +[ "$INITIAL_ACCESS_TOKEN" = "$ACCESS_TOKEN" ]||{ +_update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "$ACCESS_TOKEN" "$CONFIG" +_update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "$ACCESS_TOKEN_EXPIRY" "$CONFIG" +} +}||: 1>|/dev/null +[ -n "$ACCESS_TOKEN_SERVICE_PID" ]&&{ +token_service_pids="$(ps --ppid="$ACCESS_TOKEN_SERVICE_PID" -o pid=)" +kill "$ACCESS_TOKEN_SERVICE_PID" +}||: 1>|/dev/null +script_children_pids="$(ps --ppid="$MAIN_PID" -o pid=)" +kill $token_service_pids $script_children_pids 1>|/dev/null +rm -f "${TMPFILE:?}"* +export abnormal_exit&&if [ -n "$abnormal_exit" ];then +printf "\n\n%s\n" "Script exited manually." +kill "${_SCRIPT_KILL_SIGNAL:--9}" -$$& +else +{ _cleanup_config "$CONFIG"&&[ "${GUPLOAD_INSTALLED_WITH:-}" = script ]&&_auto_update;} 1>|/dev/null& +fi +} 2>|/dev/null||: +return 0 +} +trap 'abnormal_exit="1" ; exit' INT TERM +trap '_cleanup' EXIT +trap '' TSTP +export MAIN_PID="$$" +} +_setup_root_dir(){ +export ROOTDIR ROOT_FOLDER ROOT_FOLDER_NAME QUIET ACCOUNT_NAME CONFIG UPDATE_DEFAULT_ROOTDIR +_check_root_id(){ +_setup_root_dir_json="$(_drive_info "$(_extract_id "$ROOT_FOLDER")" "id")" +if ! rootid_setup_root_dir="$(printf "%s\n" "$_setup_root_dir_json"|_json_value id 1 1)";then +if printf "%s\n" "$_setup_root_dir_json"|grep "File not found" -q;then +"${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." 
"=" 1>&2 +else +printf "%s\n" "$_setup_root_dir_json" 1>&2 +fi +return 1 +fi +ROOT_FOLDER="$rootid_setup_root_dir" +"${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "$ROOT_FOLDER" "$CONFIG"||return 1 +return 0 +} +_check_root_id_name(){ +ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "$ROOT_FOLDER")" "name"|_json_value name 1 1||:)" +"${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" "$ROOT_FOLDER_NAME" "$CONFIG"||return 1 +return 0 +} +_set_value indirect ROOT_FOLDER "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" +_set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" +if [ -n "${ROOTDIR:-}" ];then +ROOT_FOLDER="$ROOTDIR"&&{ _check_root_id "$UPDATE_DEFAULT_ROOTDIR"||return 1;}&&unset ROOT_FOLDER_NAME +elif [ -z "$ROOT_FOLDER" ];then +{ [ -t 1 ]&&"${QUIET:-_print_center}" "normal" "Enter root folder ID or URL, press enter for default ( root )" " "&&printf -- "-> "&&read -r ROOT_FOLDER&&[ -n "$ROOT_FOLDER" ]&&{ _check_root_id _update_config||return 1;};}||{ +ROOT_FOLDER="root" +_update_config "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "$ROOT_FOLDER" "$CONFIG"||return 1 +}&&printf "\n\n" +elif [ -z "$ROOT_FOLDER_NAME" ];then +_check_root_id_name _update_config||return 1 +fi +[ -z "$ROOT_FOLDER_NAME" ]&&{ _check_root_id_name "$UPDATE_DEFAULT_ROOTDIR"||return 1;} +return 0 +} +_setup_workspace(){ +export FOLDERNAME ROOT_FOLDER ROOT_FOLDER_NAME WORKSPACE_FOLDER_ID WORKSPACE_FOLDER_NAME +if [ -z "$FOLDERNAME" ];then +WORKSPACE_FOLDER_ID="$ROOT_FOLDER" +WORKSPACE_FOLDER_NAME="$ROOT_FOLDER_NAME" +else +WORKSPACE_FOLDER_ID="$(_create_directory "$FOLDERNAME" "$ROOT_FOLDER")"||{ printf "%s\n" "$WORKSPACE_FOLDER_ID" 1>&2&&return 1;} +WORKSPACE_FOLDER_NAME="$(_drive_info "$WORKSPACE_FOLDER_ID" name|_json_value name 1 1)"||{ printf "%s\n" "$WORKSPACE_FOLDER_NAME" 1>&2&&return 1;} +fi +return 0 +} +_process_arguments(){ +export SHARE SHARE_ROLE SHARE_EMAIL HIDE_INFO QUIET SKIP_DUPLICATES OVERWRITE \ +WORKSPACE_FOLDER_ID SOURCE_UTILS EXTRA_LOG SKIP_SUBDIRS 
INCLUDE_FILES EXCLUDE_FILES \ +QUIET PARALLEL_UPLOAD VERBOSE VERBOSE_PROGRESS CHECK_MODE DESCRIPTION DESCRIPTION_ALL \ +UPLOAD_MODE HIDE_INFO +_share_and_print_link(){ +"${SHARE:-:}" "${1:-}" "$SHARE_ROLE" "$SHARE_EMAIL" +[ -z "$HIDE_INFO" ]&&{ +_print_center "justify" "DriveLink" "${SHARE:+ (SHARED[$(printf "%.1s" "$SHARE_ROLE")])}" "-" +_support_ansi_escapes&&[ "$((COLUMNS))" -gt 45 ] 2>|/dev/null&&_print_center "normal" '^ ^ ^' ' ' +"${QUIET:-_print_center}" "normal" "https://drive.google.com/open?id=${1:-}" " " +} +return 0 +} +_SEEN="" index_process_arguments=0 +TOTAL_FILE_INPUTS="$((TOTAL_FILE_INPUTS<0?0:TOTAL_FILE_INPUTS))" +until [ "$index_process_arguments" -eq "$TOTAL_FILE_INPUTS" ];do +input="" +_set_value i input "INPUT_FILE_$((index_process_arguments+=1))" +case "$_SEEN" in +*"$input"*)continue;; +*)_SEEN="$_SEEN$input" +esac +if [ -f "$input" ];then +export DESCRIPTION_FILE="$DESCRIPTION" +_print_center "justify" "Given Input" ": FILE" "=" +_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "="&&_newline "\n" +_upload_file_main noparse "$input" "$WORKSPACE_FOLDER_ID" +if [ "${RETURN_STATUS:-}" = 1 ];then +_share_and_print_link "${FILE_ID:-}" +printf "\n" +else +for _ in 1 2;do _clear_line 1;done&&continue +fi +elif [ -d "$input" ];then +input="$(cd "$input"&&pwd)"||return 1 +unset EMPTY +export DESCRIPTION_FILE="${DESCRIPTION_ALL+:$DESCRIPTION}" +_print_center "justify" "Given Input" ": FOLDER" "-" +_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "="&&_newline "\n" +FOLDER_NAME="${input##*/}"&&"$EXTRA_LOG" "justify" "Folder: $FOLDER_NAME" "=" +NEXTROOTDIRID="$WORKSPACE_FOLDER_ID" +"$EXTRA_LOG" "justify" "Processing folder.." "-" +[ -z "$SKIP_SUBDIRS" ]&&"$EXTRA_LOG" "justify" "Indexing subfolders.." 
"-" +DIRNAMES="$(find "$input" -type d -not -empty)" +NO_OF_FOLDERS="$(($(printf "%s\n" "$DIRNAMES"|wc -l)))"&&NO_OF_SUB_FOLDERS="$((NO_OF_FOLDERS-1))" +[ -z "$SKIP_SUBDIRS" ]&&_clear_line 1 +[ "$NO_OF_SUB_FOLDERS" = 0 ]&&SKIP_SUBDIRS="true" +"$EXTRA_LOG" "justify" "Indexing files.." "-" +FILENAMES="$(_tmp='find "'$input'" -type f -name "*" '$INCLUDE_FILES' '$EXCLUDE_FILES''&&eval "$_tmp")" +_clear_line 1 +if [ -n "$SKIP_SUBDIRS" ];then +if [ -n "$FILENAMES" ];then +NO_OF_FILES="$(($(printf "%s\n" "$FILENAMES"|wc -l)))" +for _ in 1 2;do _clear_line 1;done +"${QUIET:-_print_center}" "justify" "Folder: $FOLDER_NAME " "| $NO_OF_FILES File(s)" "="&&printf "\n" +"$EXTRA_LOG" "justify" "Creating folder.." "-" +{ ID="$(_create_directory "$input" "$NEXTROOTDIRID")"&&export ID;}||{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-"&&printf "%s\n\n\n" "$ID" 1>&2&&continue;} +_clear_line 1&&DIRIDS="$ID" +[ -z "${PARALLEL_UPLOAD:-${VERBOSE:-$VERBOSE_PROGRESS}}" ]&&_newline "\n" +_upload_folder "${PARALLEL_UPLOAD:-normal}" noparse "$FILENAMES" "$ID" +[ -n "${PARALLEL_UPLOAD:+${VERBOSE:-$VERBOSE_PROGRESS}}" ]&&_newline "\n\n" +else +for _ in 1 2;do _clear_line 1;done&&EMPTY=1 +fi +else +if [ -n "$FILENAMES" ];then +NO_OF_FILES="$(($(printf "%s\n" "$FILENAMES"|wc -l)))" +for _ in 1 2;do _clear_line 1;done +"${QUIET:-_print_center}" "justify" "$FOLDER_NAME " "| $((NO_OF_FILES)) File(s) | $((NO_OF_SUB_FOLDERS)) Sub-folders" "=" +_newline "\n"&&"$EXTRA_LOG" "justify" "Creating Folder(s).." 
"-"&&_newline "\n" +unset status +while read -r dir <&4&&{ [ -n "$dir" ]||continue;};do +[ -n "$status" ]&&__dir="$(_dirname "$dir")"&&__temp="$(printf "%s\n" "$DIRIDS"|grep -F "|:_//_:|$__dir|:_//_:|")"&&NEXTROOTDIRID="${__temp%%"|:_//_:|$__dir|:_//_:|"}" +NEWDIR="${dir##*/}"&&_print_center "justify" "Name: $NEWDIR" "-" 1>&2 +ID="$(_create_directory "$NEWDIR" "$NEXTROOTDIRID")"||{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-"&&printf "%s\n\n\n" "$ID" 1>&2&&continue;} +DIRIDS="$(printf "%b%s|:_//_:|%s|:_//_:|\n" "${DIRIDS:+$DIRIDS\n}" "$ID" "$dir")" +for _ in 1 2;do _clear_line 1 1>&2;done +"$EXTRA_LOG" "justify" "Status" ": $((status+=1)) / $((NO_OF_FOLDERS))" "=" 1>&2 +done 4<>"$log_file_name" +printf "%s\n" "To see the failed files, open \"$log_file_name\"" +printf "%s\n" "To retry the failed uploads only, use -d / --skip-duplicates flag. See log file for more help." +} +else +printf "%s\n" "$ERROR_FILES" +fi +} +printf "\n" +else +for _ in 1 2 3;do _clear_line 1;done +"${QUIET:-_print_center}" 'justify' "Empty Folder" ": $FOLDER_NAME" "=" 1>&2 +printf "\n" +fi +fi +done +_SEEN="" index_process_arguments=0 +TOTAL_ID_INPUTS="$((TOTAL_ID_INPUTS<0?0:TOTAL_ID_INPUTS))" +until [ "$index_process_arguments" -eq "$TOTAL_ID_INPUTS" ];do +gdrive_id="" +_set_value gdrive_id "INPUT_ID_$((index_process_arguments+=1))" +case "$_SEEN" in +*"$gdrive_id"*)continue;; +*)_SEEN="$_SEEN$gdrive_id" +esac +_print_center "justify" "Given Input" ": ID" "=" +"$EXTRA_LOG" "justify" "Checking if id exists.." "-" +[ "$CHECK_MODE" = "md5Checksum" ]&¶m="md5Checksum" +json="$(_drive_info "$gdrive_id" "name,mimeType,size${param:+,$param}")"||: +if ! 
printf "%s\n" "$json"|_json_value code 1 1 2>|/dev/null 1>&2;then +type="$(printf "%s\n" "$json"|_json_value mimeType 1 1||:)" +name="$(printf "%s\n" "$json"|_json_value name 1 1||:)" +size="$(printf "%s\n" "$json"|_json_value size 1 1||:)" +[ "$CHECK_MODE" = "md5Checksum" ]&&md5="$(printf "%s\n" "$json"|_json_value md5Checksum 1 1||:)" +for _ in 1 2;do _clear_line 1;done +case "$type" in +*folder*)export DESCRIPTION_FILE="${DESCRIPTION_ALL+:$DESCRIPTION}" +"${QUIET:-_print_center}" "justify" "Folder not supported." "=" 1>&2&&_newline "\n" 1>&2&&continue +;; +*)export DESCRIPTION_FILE="$DESCRIPTION" +_print_center "justify" "Given Input" ": File ID" "=" +_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "="&&_newline "\n" +_clone_file "${UPLOAD_MODE:-create}" "$gdrive_id" "$WORKSPACE_FOLDER_ID" "$name" "$size" "$md5"||{ for _ in 1 2;do _clear_line 1;done&&continue;} +esac +_share_and_print_link "$FILE_ID" +printf "\n" +else +_clear_line 1 +"${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-gdrive_id})" " invalid." "=" 1>&2 +printf "\n" +fi +done +return 0 +} +_main_helper(){ +_setup_arguments "$@"||exit 1 +"${SKIP_INTERNET_CHECK:-_check_internet}"||exit 1 +TMPFILE="$(command -v mktemp 1>|/dev/null&&mktemp -u)"||TMPFILE="$(pwd)/.$(_t="$(_epoch)"&&printf "%s\n" "$((_t*_t))").tmpfile" +export TMPFILE +_setup_traps +"$EXTRA_LOG" "justify" "Checking credentials.." "-" +{ _check_credentials&&_clear_line 1;}||{ "${QUIET:-_print_center}" "normal" "[ Error: Credentials checking failed ]" "="&&exit 1;} +"${QUIET:-_print_center}" "normal" " Account: $ACCOUNT_NAME " "=" +"$EXTRA_LOG" "justify" "Checking root dir.." "-" +{ _setup_root_dir&&_clear_line 1;}||{ "${QUIET:-_print_center}" "normal" "[ Error: Rootdir setup failed ]" "="&&exit 1;} +_print_center "justify" "Root dir properly configured." "=" +[ -n "$CONTINUE_WITH_NO_INPUT" ]&&exit 0 +"$EXTRA_LOG" "justify" "Checking Workspace Folder.." 
"-" +{ _setup_workspace&&for _ in 1 2;do _clear_line 1;done;}||{ "${QUIET:-_print_center}" "normal" "[ Error: Workspace setup failed ]" "="&&exit 1;} +_print_center "justify" "Workspace Folder: $WORKSPACE_FOLDER_NAME" "=" +"${HIDE_INFO:-_print_center}" "normal" " $WORKSPACE_FOLDER_ID " "-"&&_newline "\n" +START="$(_epoch)" +[ -n "$SUPPORT_ANSI_ESCAPES" ]&&printf "\033[?25l" +_process_arguments +END="$(_epoch)" +DIFF="$((END-START))" +"${QUIET:-_print_center}" "normal" " Time Elapsed: ""$((DIFF/60))"" minute(s) and ""$((DIFF%60))"" seconds. " "=" +} +set +a +main(){ +[ $# = 0 ]&&{ +printf "No valid arguments provided, use -h/--help flag to see usage.\n" +exit 0 +} +export _SHELL="sh" +if [ -z "$SELF_SOURCE" ];then +export UTILS_FOLDER="${UTILS_FOLDER:-$PWD}" +export COMMON_PATH="$UTILS_FOLDER/common" +export SOURCE_UTILS=". '$UTILS_FOLDER/sh/common-utils.sh' && + . '$COMMON_PATH/parser.sh' && + . '$COMMON_PATH/flags.sh' && + . '$COMMON_PATH/auth-utils.sh' && + . '$COMMON_PATH/common-utils.sh' && + . '$COMMON_PATH/drive-utils.sh' && + . '$COMMON_PATH/upload-utils.sh' + . '$COMMON_PATH/upload-common.sh'" +else +SCRIPT_PATH="$(cd "$(_dirname "$0")"&&pwd)/${0##*\/}"&&export SCRIPT_PATH +export SOURCE_UTILS="SOURCED_GUPLOAD=true . '$SCRIPT_PATH'" +fi +eval "$SOURCE_UTILS"||{ printf "Error: Unable to source util files.\n"&&exit 1;} +set -o noclobber +export _SCRIPT_KILL_SIGNAL="-9" +_main_helper "$@"||exit 1 +} +{ [ -z "$SOURCED_GUPLOAD" ]&&main "$@";}||: diff --git a/sh/release/gsync b/sh/release/gsync deleted file mode 100755 index 50af0bc..0000000 --- a/sh/release/gsync +++ /dev/null @@ -1,1191 +0,0 @@ -#!/usr/bin/env sh -SELF_SOURCE="true" -# Functions that will used in core script -# posix functions - -################################################### -# Convert bytes to human readable form -# Globals: None -# Required Arguments: 1 -# ${1} = Positive integer ( bytes ) -# Result: Print human readable form. 
-# Reference: -# https://unix.stackexchange.com/a/538015 -################################################### -_bytes_to_human() { - b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0 - d_bytes_to_human='' type_bytes_to_human='' - while [ "${b_bytes_to_human}" -gt 1024 ]; do - d_bytes_to_human="$(printf ".%02d" $((b_bytes_to_human % 1024 * 100 / 1024)))" - b_bytes_to_human=$((b_bytes_to_human / 1024)) && s_bytes_to_human=$((s_bytes_to_human += 1)) - done - j=0 && for i in B KB MB GB TB PB EB YB ZB; do - j="$((j += 1))" && [ "$((j - 1))" = "${s_bytes_to_human}" ] && type_bytes_to_human="${i}" && break - continue - done - printf "%s\n" "${b_bytes_to_human}${d_bytes_to_human} ${type_bytes_to_human}" -} - -################################################### -# Check if debug is enabled and enable command trace -# Globals: 2 variables, 1 function -# Varibles - DEBUG, QUIET -# Function - _is_terminal -# Arguments: None -# Result: If DEBUG -# Present - Enable command trace and change print functions to avoid spamming. -# Absent - Disable command trace -# Check QUIET, then check terminal size and enable print functions accordingly. -################################################### -_check_debug() { - if [ -n "${DEBUG}" ]; then - set -x && PS4='-> ' - _print_center() { { [ $# = 3 ] && printf "%s\n" "${2}"; } || { printf "%s%s\n" "${2}" "${3}"; }; } - _clear_line() { :; } && _newline() { :; } - else - if [ -z "${QUIET}" ]; then - # check if running in terminal and support ansi escape sequences - if _support_ansi_escapes; then - ! 
COLUMNS="$(_get_columns_size)" || [ "${COLUMNS:-0}" -lt 45 ] 2>| /dev/null && - _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - export CURL_PROGRESS="-#" EXTRA_LOG="_print_center" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" - else - _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - _clear_line() { :; } - fi - _newline() { printf "%b" "${1}"; } - else - _print_center() { :; } && _clear_line() { :; } && _newline() { :; } - fi - set +x - fi -} - -################################################### -# Check internet connection. -# Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs. -# Globals: 3 functions -# _print_center, _clear_line, _timeout -# Arguments: None -# Result: On -# Success - Nothing -# Error - print message and exit 1 -################################################### -_check_internet() { - "${EXTRA_LOG}" "justify" "Checking Internet Connection.." "-" - if ! _timeout 10 curl -Is google.com --compressed; then - _clear_line 1 - "${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" - return 1 - fi - _clear_line 1 -} - -################################################### -# Move cursor to nth no. of line and clear it to the begining. 
-# Globals: None -# Arguments: 1 -# ${1} = Positive integer ( line number ) -# Result: Read description -################################################### -_clear_line() { - printf "\033[%sA\033[2K" "${1}" -} - -################################################### -# Alternative to dirname command -# Globals: None -# Arguments: 1 -# ${1} = path of file or folder -# Result: read description -# Reference: -# https://github.com/dylanaraps/pure-sh-bible#file-paths -################################################### -_dirname() { - dir_dirname="${1:-.}" - dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" && [ "${dir_dirname##*/*}" ] && dir_dirname=. - dir_dirname="${dir_dirname%/*}" && dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" - printf '%s\n' "${dir_dirname:-/}" -} - -################################################### -# Convert given time in seconds to readable form -# 110 to 1 minute(s) and 50 seconds -# Globals: None -# Arguments: 1 -# ${1} = Positive Integer ( time in seconds ) -# Result: read description -# Reference: -# https://stackoverflow.com/a/32164707 -################################################### -_display_time() { - t_display_time="${1}" day_display_time="$((t_display_time / 60 / 60 / 24))" - hr_display_time="$((t_display_time / 60 / 60 % 24))" min_display_time="$((t_display_time / 60 % 60))" sec_display_time="$((t_display_time % 60))" - [ "${day_display_time}" -gt 0 ] && printf '%d days ' "${day_display_time}" - [ "${hr_display_time}" -gt 0 ] && printf '%d hrs ' "${hr_display_time}" - [ "${min_display_time}" -gt 0 ] && printf '%d minute(s) ' "${min_display_time}" - [ "${day_display_time}" -gt 0 ] || [ "${hr_display_time}" -gt 0 ] || [ "${min_display_time}" -gt 0 ] && printf 'and ' - printf '%d seconds\n' "${sec_display_time}" -} - -################################################### -# print column size -# use zsh or stty or tput -################################################### -_get_columns_size() { - { command -v bash 1>| 
/dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } || - { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } || - { command -v stty 1>| /dev/null && _tmp="$(stty size)" && printf "%s\n" "${_tmp##* }"; } || - { command -v tput 1>| /dev/null && tput cols; } || - return 1 -} - -################################################### -# Fetch latest commit sha of release or branch -# Do not use github rest api because rate limit error occurs -# Globals: None -# Arguments: 3 -# ${1} = "branch" or "release" -# ${2} = branch name or release name -# ${3} = repo name e.g labbots/google-drive-upload -# Result: print fetched sha -################################################### -_get_latest_sha() { - unset latest_sha_get_latest_sha raw_get_latest_sha - case "${1:-${TYPE}}" in - branch) - latest_sha_get_latest_sha="$( - raw_get_latest_sha="$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000)" - _tmp="$(printf "%s\n" "${raw_get_latest_sha}" | grep -o "Commit\\/.*<" -m1 || :)" && _tmp="${_tmp##*\/}" && printf "%s\n" "${_tmp%%<*}" - )" - ;; - release) - latest_sha_get_latest_sha="$( - raw_get_latest_sha="$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}")" - _tmp="$(printf "%s\n" "${raw_get_latest_sha}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && _tmp="${_tmp##*commit\/}" && printf "%s\n" "${_tmp%%\"*}" - )" - ;; - esac - printf "%b" "${latest_sha_get_latest_sha:+${latest_sha_get_latest_sha}\n}" -} - -################################################### -# Encode the given string to parse properly as json -# Globals: None -# Arguments: 2 -# ${1} = json or something else -# ${2} = input -# Result: if ${1} is j, then escape all chars, else only special chars -# Reference: -# https://tools.ietf.org/html/rfc7159#section-7 -################################################### -_json_escape() { - mode_json_escape="${1:?Missing 
mode}" input_json_escape="${2:?Provide Input}" output_json_escape="" - # just for refrence "s|'|\'|g" - if [ "${mode_json_escape}" = "j" ]; then - output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ - -e "s|\\\|\\\\\\\|g" \ - -e "s|\/|\\\/|g" \ - -e 's/\"/\\\"/g' \ - -e "s/$(printf '\t')/\\t/g" \ - -e "s/$(printf '\r')/\\r/g" \ - -e "s/$(printf '\f')/\\f/g")" - else - output_json_escape="$(printf "%s" "${input_json_escape}" | sed \ - -e "s/$(printf '\t')/\\t/g" \ - -e "s/$(printf '\r')/\\r/g" \ - -e "s/$(printf '\f')/\\f/g")" - fi - # use awk because sed just messes up with newlines - output_json_escape="$(printf "%s" "${output_json_escape}" | awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')" - printf "%s" "${output_json_escape}" -} - -################################################### -# Method to extract specified field data from json -# Globals: None -# Arguments: 2 -# ${1} - value of field to fetch from json -# ${2} - Optional, no of lines to parse for the given field in 1st arg -# ${3} - Optional, nth number of value from extracted values, default it 1. -# Input: file | pipe -# _json_value "Arguments" < file -# echo something | _json_value "Arguments" -# Result: print extracted value -################################################### -_json_value() { - { [ "${2}" -gt 0 ] 2>| /dev/null && no_of_lines_json_value="${2}"; } || : - { [ "${3}" -gt 0 ] 2>| /dev/null && num_json_value="${3}"; } || { ! [ "${3}" = all ] && num_json_value=1; } - # shellcheck disable=SC2086 - _tmp="$(grep -o "\"${1}\"\:.*" ${no_of_lines_json_value:+-m} ${no_of_lines_json_value})" || return 1 - printf "%s\n" "${_tmp}" | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num_json_value}"p || : - return 0 -} - -################################################### -# Print a text to center interactively and fill the rest of the line with text specified. 
-# This function is fine-tuned to this script functionality, so may appear unusual. -# Globals: 1 variable -# COLUMNS -# Arguments: 4 -# If ${1} = normal -# ${2} = text to print -# ${3} = symbol -# If ${1} = justify -# If remaining arguments = 2 -# ${2} = text to print -# ${3} = symbol -# If remaining arguments = 3 -# ${2}, ${3} = text to print -# ${4} = symbol -# Result: read description -# Reference: -# https://gist.github.com/TrinityCoder/911059c83e5f7a351b785921cf7ecda -################################################### -_print_center() { - [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - term_cols_print_center="${COLUMNS}" - type_print_center="${1}" filler_print_center="" - case "${type_print_center}" in - normal) out_print_center="${2}" && symbol_print_center="${3}" ;; - justify) - if [ $# = 3 ]; then - input1_print_center="${2}" symbol_print_center="${3}" to_print_print_center="" out_print_center="" - to_print_print_center="$((term_cols_print_center - 5))" - { [ "${#input1_print_center}" -gt "${to_print_print_center}" ] && out_print_center="[ $(printf "%.${to_print_print_center}s\n" "${input1_print_center}")..]"; } || - { out_print_center="[ ${input1_print_center} ]"; } - else - input1_print_center="${2}" input2_print_center="${3}" symbol_print_center="${4}" to_print_print_center="" temp_print_center="" out_print_center="" - to_print_print_center="$((term_cols_print_center * 47 / 100))" - { [ "${#input1_print_center}" -gt "${to_print_print_center}" ] && temp_print_center=" $(printf "%.${to_print_print_center}s\n" "${input1_print_center}").."; } || - { temp_print_center=" ${input1_print_center}"; } - to_print_print_center="$((term_cols_print_center * 46 / 100))" - { [ "${#input2_print_center}" -gt "${to_print_print_center}" ] && temp_print_center="${temp_print_center}$(printf "%.${to_print_print_center}s\n" "${input2_print_center}").. 
"; } || - { temp_print_center="${temp_print_center}${input2_print_center} "; } - out_print_center="[${temp_print_center}]" - fi - ;; - *) return 1 ;; - esac - - str_len_print_center="${#out_print_center}" - [ "${str_len_print_center}" -ge "$((term_cols_print_center - 1))" ] && { - printf "%s\n" "${out_print_center}" && return 0 - } - - filler_print_center_len="$(((term_cols_print_center - str_len_print_center) / 2))" - - i_print_center=1 && while [ "${i_print_center}" -le "${filler_print_center_len}" ]; do - filler_print_center="${filler_print_center}${symbol_print_center}" && i_print_center="$((i_print_center + 1))" - done - - printf "%s%s%s" "${filler_print_center}" "${out_print_center}" "${filler_print_center}" - [ "$(((term_cols_print_center - str_len_print_center) % 2))" -ne 0 ] && printf "%s" "${symbol_print_center}" - printf "\n" - - return 0 -} - -################################################### -# Quiet version of _print_center -################################################### -_print_center_quiet() { - { [ $# = 3 ] && printf "%s\n" "${2}"; } || printf "%s%s\n" "${2}" "${3}" -} - -################################################### -# Evaluates value1=value2 -# Globals: None -# Arguments: 3 -# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) -# ${2} = var name -# ${3} = var value -# Result: export value1=value2 -################################################### -_set_value() { - case "${1:?}" in - d | direct) export "${2:?}=${3}" ;; - i | indirect) export "${2:?}=$(eval printf "%s" \"\$"${3}"\")" ;; - esac -} - -################################################### -# Check if script terminal supports ansi escapes -# Globals: 1 variable -# TERM -# Arguments: None -# Result: return 1 or 0 -################################################### -_support_ansi_escapes() { - unset ansi_escapes - case "${TERM}" in - xterm* | rxvt* | urxvt* | linux* | vt* | screen*) ansi_escapes="true" ;; - esac - { [ -t 2 ] && [ -n "${ansi_escapes}" ] && return 0; 
} || return 1 -} - -################################################### -# Alternative to timeout command -# Globals: None -# Arguments: 1 and rest -# ${1} = amount of time to sleep -# rest = command to execute -# Result: Read description -# Reference: -# https://stackoverflow.com/a/24416732 -################################################### -_timeout() { - timeout_timeout="${1:?Error: Specify Timeout}" && shift - { - "${@}" & - child="${!}" - trap -- "" TERM - { - sleep "${timeout_timeout}" - kill -9 "${child}" - } & - wait "${child}" - } 2>| /dev/null 1>&2 -} - -################################################### -# Config updater -# Incase of old value, update, for new value add. -# Globals: None -# Arguments: 3 -# ${1} = value name -# ${2} = value -# ${3} = config path -# Result: read description -################################################### -_update_config() { - [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - value_name_update_config="${1}" value_update_config="${2}" config_path_update_config="${3}" - ! [ -f "${config_path_update_config}" ] && : >| "${config_path_update_config}" # If config file doesn't exist. - chmod u+w "${config_path_update_config}" || return 1 - printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name_update_config}=" "${config_path_update_config}" || :)" \ - "${value_name_update_config}=\"${value_update_config}\"" >| "${config_path_update_config}" || return 1 - chmod a-w-r-x,u+r "${config_path_update_config}" || return 1 - return 0 -} - -################################################### -# Encode the given string to parse properly in network requests -# Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string -# Reference: -# https://stackoverflow.com/a/41405682 -################################################### -_url_encode() ( - LC_ALL=C LANG=C - awk 'BEGIN {while (y++ < 125) z[sprintf("%c", y)] = y - while (y = substr(ARGV[1], ++j, 1)) - q = y ~ /[[:alnum:]]_.!~*\47()-]/ ? 
q y : q sprintf("%%%02X", z[y]) - print q}' "${1}" -) -# Sync a FOLDER to google drive forever using labbots/google-drive-upload -# shellcheck source=/dev/null - -_usage() { - printf "%b" " -The script can be used to sync your local folder to google drive. - -Utilizes google-drive-upload bash scripts.\n -Usage: ${0##*/} [options.. ]\n -Options:\n - -d | --directory - Gdrive foldername.\n - -k | --kill - to kill the background job using pid number ( -p flags ) or used with input, can be used multiple times.\n - -j | --jobs - See all background jobs that were started and still running.\n - Use --jobs v/verbose to more information for jobs.\n - -p | --pid - Specify a pid number, used for --jobs or --kill or --info flags, can be used multiple times.\n - -i | --info - See information about a specific sync using pid_number ( use -p flag ) or use with input, can be used multiple times.\n - -t | --time - Amount of time to wait before try to sync again in background.\n - To set wait time by default, use ${0##*/} -t default='3'. Replace 3 with any positive integer.\n - -l | --logs - To show the logs after starting a job or show log of existing job. Can be used with pid number ( -p flag ). - Note: If multiple pid numbers or inputs are used, then will only show log of first input as it goes on forever. - -a | --arguments - Additional arguments for gupload commands. 
e.g: ${0##*/} -a '-q -o -p 4 -d'.\n - To set some arguments by default, use ${0##*/} -a default='-q -o -p 4 -d'.\n - -fg | --foreground - This will run the job in foreground and show the logs.\n - -in | --include 'pattern' - Only include the files with the given pattern to upload.\n - e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n - -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading.\n - e.g: ${0##*/} local_folder --exclude "*1*", will exclude all files with pattern '1' in the name.\n - -c | --command 'command name'- Incase if gupload command installed with any other name or to use in systemd service.\n - --sync-detail-dir 'dirname' - Directory where a job information will be stored. - Default: ${HOME}/.google-drive-upload\n - -s | --service 'service name' - To generate systemd service file to setup background jobs on boot.\n - -D | --debug - Display script command trace, use before all the flags to see maximum script trace.\n - -h | --help - Display usage instructions.\n" - exit 0 -} - -_short_help() { - printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit 0 -} - -################################################### -# Check if a pid exists by using ps -# Globals: None -# Arguments: 1 -# ${1}" = pid number of a sync job -# Result: return 0 or 1 -################################################### -_check_pid() { - { ps -p "${1}" 2>| /dev/null 1>&2 && return 0; } || return 1 -} - -################################################### -# Show information about a specific sync job -# Globals: 1 variable, 1 function -# Variable - SYNC_LIST -# Functions - _setup_loop_variables -# Arguments: 1 -# ${1}" = pid number of a sync job -# ${2}" = anything: Prints extra information ( optional ) -# ${3}" = all information about a job ( optional ) -# Result: read description -################################################### -_get_job_info() { - unset 
local_folder_get_job_info times_get_job_info extra_get_job_info - pid_get_job_info="${1}" && input_get_job_info="${3:-$(grep "${pid_get_job_info}" "${SYNC_LIST}" || :)}" - - if [ -n "${input_get_job_info}" ]; then - if times_get_job_info="$(ps -p "${pid_get_job_info}" -o etimes --no-headers)"; then - printf "\n%s\n" "PID: ${pid_get_job_info}" - _tmp="${input_get_job_info#*"|:_//_:|"}" && local_folder_get_job_info="${_tmp%%"|:_//_:|"*}" - - printf "Local Folder: %s\n" "${local_folder_get_job_info}" - printf "Drive Folder: %s\n" "${input_get_job_info##*"|:_//_:|"}" - printf "Running Since: %s\n" "$(_display_time "${times_get_job_info}")" - - [ -n "${2}" ] && { - extra_get_job_info="$(ps -p "${pid_get_job_info}" -o %cpu,%mem --no-headers || :)" - printf "CPU usage:%s\n" "${extra_get_job_info% *}" - printf "Memory usage: %s\n" "${extra_get_job_info##* }" - _setup_loop_variables "${local_folder_get_job_info}" "${input_get_job_info##*"|:_//_:|"}" - printf "Success: %s\n" "$(($(wc -l < "${SUCCESS_LOG}")))" - printf "Failed: %s\n" "$(($(wc -l < "${ERROR_LOG}")))" - } - RETURN_STATUS=0 - else - RETURN_STATUS=1 - fi - else - RETURN_STATUS=11 - fi - return 0 -} - -################################################### -# Remove a sync job information from database -# Globals: 2 variables -# SYNC_LIST, SYNC_DETAIL_DIR -# Arguments: 1 -# ${1} = pid number of a sync job -# Result: read description -################################################### -_remove_job() { - unset input_remove_job local_folder_remove_job drive_folder_remove_job new_list_remove_job - pid_remove_job="${1}" - - if [ -n "${pid_remove_job}" ]; then - input_remove_job="$(grep "${pid_remove_job}" "${SYNC_LIST}" || :)" - _tmp="${input_remove_job#*"|:_//_:|"}" && local_folder_remove_job="${_tmp%%"|:_//_:|"*}" - drive_folder_remove_job="${input_remove_job##*"|:_//_:|"}" - new_list_remove_job="$(grep -v "${pid_remove_job}" "${SYNC_LIST}" || :)" - printf "%s\n" "${new_list_remove_job}" >| "${SYNC_LIST}" - fi - - rm 
-rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-${2}}${local_folder_remove_job:-${3}}" - # Cleanup dir if empty - { [ -z "$(find "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-${2}}" -type f)" ] && rm -rf "${SYNC_DETAIL_DIR:?}/${drive_folder_remove_job:-${2}}"; } 2>| /dev/null 1>&2 - return 0 -} - -################################################### -# Kill a sync job and do _remove_job -# Globals: 1 function -# _remove_job -# Arguments: 1 -# ${1}" = pid number of a sync job -# Result: read description -################################################### -_kill_job() { - pid_kill_job="${1}" - kill -9 "${pid_kill_job}" 2>| /dev/null 1>&2 || : - _remove_job "${pid_kill_job}" - printf "Killed.\n" -} - -################################################### -# Show total no of sync jobs running -# Globals: 1 variable, 2 functions -# Variable - SYNC_LIST -# Functions - _get_job_info, _remove_job -# Arguments: 1 -# ${1}" = v/verbose: Prints extra information ( optional ) -# Result: read description -################################################### -_show_jobs() { - unset list_show_job pid_show_job no_task_show_job - total_show_job=0 list_show_job="$(grep -v '^$' "${SYNC_LIST}" || :)" - printf "%s\n" "${list_show_job}" >| "${SYNC_LIST}" - - while read -r line <&4; do - if [ -n "${line}" ]; then - _tmp="${line%%"|:_//_:|"*}" && pid_show_job="${_tmp##*: }" - _get_job_info "${pid_show_job}" "${1}" "${line}" - { [ "${RETURN_STATUS}" = 1 ] && _remove_job "${pid_show_job}"; } || { total_show_job="$((total_show_job + 1))" && no_task_show_job="printf"; } - fi - done 4< "${SYNC_LIST}" - - printf "\nTotal Jobs Running: %s\n" "${total_show_job}" - [ -z "${1}" ] && "${no_task_show_job:-:}" "For more info: %s -j/--jobs v/verbose\n" "${0##*/}" - return 0 -} - -################################################### -# Setup required variables for a sync job -# Globals: 1 Variable -# SYNC_DETAIL_DIR -# Arguments: 1 -# ${1}" = Local folder name which will be synced -# Result: read 
description -################################################### -_setup_loop_variables() { - folder_setup_loop_variables="${1}" drive_folder_setup_loop_variables="${2}" - DIRECTORY="${SYNC_DETAIL_DIR}/${drive_folder_setup_loop_variables}${folder_setup_loop_variables}" - PID_FILE="${DIRECTORY}/pid" - SUCCESS_LOG="${DIRECTORY}/success_list" - ERROR_LOG="${DIRECTORY}/failed_list" - LOGS="${DIRECTORY}/logs" -} - -################################################### -# Create folder and files for a sync job -# Globals: 4 variables -# DIRECTORY, PID_FILE, SUCCESS_LOG, ERROR_LOG -# Arguments: None -# Result: read description -################################################### -_setup_loop_files() { - mkdir -p "${DIRECTORY}" - for file in PID_FILE SUCCESS_LOG ERROR_LOG; do - printf "" >> "$(eval printf "%s" \"\$"${file}"\")" - done - PID="$(cat "${PID_FILE}")" -} - -################################################### -# Check for new files in the sync folder and upload it -# A list is generated everytime, success and error. 
-# Globals: 4 variables, 1 function -# Variables - SUCCESS_LOG, ERROR_LOG, COMMAND_NAME, ARGS, GDRIVE_FOLDER -# Function - _remove_array_duplicates -# Arguments: None -# Result: read description -################################################### -_check_and_upload() { - unset all_check_and_upload initial_check_and_upload new_files_check_and_upload new_file_check_and_upload aseen_check_and_upload - - initial_check_and_upload="$(cat "${SUCCESS_LOG}")" - all_check_and_upload="$(cat "${SUCCESS_LOG}" "${ERROR_LOG}")" - - # check if folder is empty - [ "$(printf "%b\n" ./*)" = "./*" ] && return 0 - - # shellcheck disable=SC2086 - all_check_and_upload="${all_check_and_upload} -$(_tmp='printf -- "%b\n" * '${INCLUDE_FILES:+| grep -E ${INCLUDE_FILES}}'' && eval "${_tmp}")" - - # Open file discriptors for grep - exec 5<< EOF -$(printf "%s\n" "${initial_check_and_upload}") -EOF - exec 6<< EOF -$(printf "%s\n" "${all_check_and_upload}") -EOF - # shellcheck disable=SC2086 - new_files_check_and_upload="$(eval grep -vExf /dev/fd/5 /dev/fd/6 -e '^$' ${EXCLUDE_FILES} || :)" - # close file discriptos - exec 5<&- && exec 6<&- - - [ -n "${new_files_check_and_upload}" ] && printf "" >| "${ERROR_LOG}" && { - while read -r new_file_check_and_upload <&4 && - case "${aseen_check_and_upload}" in - *"|:_//_:|${new_file_check_and_upload}|:_//_:|"*) continue ;; - *) aseen_check_and_upload="${aseen_check_and_upload}|:_//_:|${new_file_check_and_upload}|:_//_:|" ;; - esac do - if eval "\"${COMMAND_PATH}\"" "\"${new_file_check_and_upload}\"" "${ARGS}"; then - printf "%s\n" "${new_file_check_and_upload}" >> "${SUCCESS_LOG}" - else - printf "%s\n" "${new_file_check_and_upload}" >> "${ERROR_LOG}" - printf "%s\n" "Error: Input - ${new_file_check_and_upload}" - fi - printf "\n" - done 4<< EOF -$(printf "%s\n" "${new_files_check_and_upload}") -EOF - } - return 0 -} - -################################################### -# Loop _check_and_upload function, sleep for sometime in between -# Globals: 1 
# variable, 1 function
# Variable - SYNC_TIME_TO_SLEEP
# Function - _check_and_upload
# Arguments: None
# Result: read description
###################################################
# NOTE(review): this whole span was restored from a newline-mangled git-diff
# fragment; line breaks were re-derived from the ' -' diff markers.
_loop() {
    while :; do
        _check_and_upload
        sleep "${SYNC_TIME_TO_SLEEP}"
    done
}

###################################################
# Check if a loop exists with given input
# Globals: 3 variables, 3 function
# Variable - FOLDER, PID, GDRIVE_FOLDER
# Function - _setup_loop_variables, _setup_loop_files, _check_pid
# Arguments: None
# Result: return 0 - No existing loop, 1 - loop exists, 2 - loop only in database
# if return 2 - then remove entry from database
###################################################
_check_existing_loop() {
    _setup_loop_variables "${FOLDER}" "${GDRIVE_FOLDER}"
    _setup_loop_files
    if [ -z "${PID}" ]; then
        RETURN_STATUS=0
    elif _check_pid "${PID}"; then
        RETURN_STATUS=1
    else
        # stale pid in database: drop the job and re-create clean state files
        _remove_job "${PID}"
        _setup_loop_variables "${FOLDER}" "${GDRIVE_FOLDER}"
        _setup_loop_files
        RETURN_STATUS=2
    fi
    return 0
}

###################################################
# Start a new sync job by _loop function
# Print sync job information
# Globals: 7 variables, 1 function
# Variable - LOGS, PID_FILE, INPUT, GDRIVE_FOLDER, FOLDER, SYNC_LIST, FOREGROUND
# Function - _loop
# Arguments: None
# Result: read description
# Show logs at last and don't hangup if SHOW_LOGS is set
###################################################
_start_new_loop() {
    if [ -n "${FOREGROUND}" ]; then
        printf "%b\n" "Local Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}\n"
        trap '_clear_line 1 && printf "\n" && _remove_job "" "${GDRIVE_FOLDER}" "${FOLDER}"; exit' INT TERM
        trap 'printf "Job stopped.\n" ; exit' EXIT
        _loop
    else
        (_loop 2>| "${LOGS}" 1>&2) & # A double fork doesn't get killed if script exits
        PID="${!}"
        printf "%s\n" "${PID}" >| "${PID_FILE}"
        printf "%b\n" "Local Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}\nPID: ${PID}"
        printf "%b\n" "PID: ${PID}|:_//_:|${FOLDER}|:_//_:|${GDRIVE_FOLDER}" >> "${SYNC_LIST}"
        [ -n "${SHOW_LOGS}" ] && printf "\n" && tail -f "${LOGS}"
    fi
    return 0
}

###################################################
# Triggers in case either -j & -k or -l flag ( both -k|-j if with positive integer as argument )
# Priority: -j > -i > -l > -k
# Globals: 5 variables, 6 functions
# Variables - JOB, SHOW_JOBS_VERBOSE, INFO_PID, LOG_PID, KILL_PID ( all array )
# Functions - _check_pid, _setup_loop_variables
# _kill_job, _show_jobs, _get_job_info, _remove_job
# Arguments: None
# Result: show either job info, individual info or kill job(s) according to set global variables.
# Script exits after -j and -k if kill all is triggered )
###################################################
_do_job() {
    case "${JOB}" in
        *SHOW_JOBS*)
            _show_jobs "${SHOW_JOBS_VERBOSE:-}"
            exit
            ;;
        *KILL_ALL*)
            PIDS="$(_show_jobs | grep -o 'PID:.*[0-9]' | sed "s/PID: //g" || :)" && total=0
            [ -n "${PIDS}" ] && {
                for _pid in ${PIDS}; do
                    printf "PID: %s - " "${_pid##* }"
                    _kill_job "${_pid##* }"
                    total="$((total + 1))"
                done
            }
            printf "\nTotal Jobs Killed: %s\n" "${total}"
            exit
            ;;
        *PIDS*)
            # iterate the de-duplicated ALL_PIDS list fed through fd 4
            unset Aseen && while read -r pid <&4 && { [ -n "${pid}" ] || continue; } &&
                case "${Aseen}" in
                    *"|:_//_:|${pid}|:_//_:|"*) continue ;;
                    *) Aseen="${Aseen}|:_//_:|${pid}|:_//_:|" ;;
                esac do
                case "${JOB_TYPE}" in
                    *INFO*)
                        _get_job_info "${pid}" more
                        [ "${RETURN_STATUS}" -gt 0 ] && {
                            [ "${RETURN_STATUS}" = 1 ] && _remove_job "${pid}"
                            printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2
                        }
                        ;;
                esac
                case "${JOB_TYPE}" in
                    *SHOW_LOGS*)
                        input="$(grep "${pid}" "${SYNC_LIST}" || :)"
                        if [ -n "${input}" ]; then
                            _check_pid "${pid}" && {
                                _tmp="${input#*"|:_//_:|"}" && local_folder="${_tmp%%"|:_//_:|"*/}"
                                _setup_loop_variables "${local_folder}" "${input##*"|:_//_:|"/}"
                                tail -f "${LOGS}"
                            }
                        else
                            printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2
                        fi
                        ;;
                esac
                case "${JOB_TYPE}" in
                    *KILL*)
                        _get_job_info "${pid}"
                        if [ "${RETURN_STATUS}" = 0 ]; then
                            _kill_job "${pid}"
                        else
                            [ "${RETURN_STATUS}" = 1 ] && _remove_job "${pid}"
                            printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2
                        fi
                        ;;
                esac
            done 4<< EOF
$(printf "%s\n" "${ALL_PIDS}")
EOF
            case "${JOB_TYPE}" in
                *INFO* | *SHOW_LOGS* | *KILL*) exit 0 ;;
            esac
            ;;
    esac
    return 0
}

###################################################
# Process all arguments given to the script
# Globals: 1 variable, 4 functions
# Variable - HOME
# Functions - _kill_jobs, _show_jobs, _get_job_info, _remove_array_duplicates
# Arguments: Many
# ${@} = Flags with arguments
# Result: On
# Success - Set all the variables
# Error - Print error message and exit
###################################################
_setup_arguments() {
    [ $# = 0 ] && printf "Missing arguments\n" && return 1
    unset SYNC_TIME_TO_SLEEP ARGS COMMAND_NAME DEBUG GDRIVE_FOLDER KILL SHOW_LOGS
    COMMAND_NAME="gupload"

    _check_longoptions() {
        [ -z "${2}" ] &&
            printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" &&
            exit 1
        return 0
    }

    while [ $# -gt 0 ]; do
        case "${1}" in
            -h | --help) _usage ;;
            -D | --debug) DEBUG="true" && export DEBUG && _check_debug ;;
            -d | --directory)
                _check_longoptions "${1}" "${2}"
                GDRIVE_FOLDER="${2}" && shift
                ARGS=" ${ARGS} -C \"${GDRIVE_FOLDER}\" "
                ;;
            -j | --jobs)
                case "${2}" in
                    v*) SHOW_JOBS_VERBOSE="true" && shift ;;
                esac
                JOB="SHOW_JOBS"
                ;;
            -p | --pid)
                _check_longoptions "${1}" "${2}"
                if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then
                    ALL_PIDS="${ALL_PIDS}
                    ${2}" && shift
                    JOB=" ${JOBS} PIDS "
                else
                    # NOTE(review): "postive" typo in this user-facing message — fix upstream
                    printf "%s\n" "-p/--pid only takes postive integer as arguments."
                    exit 1
                fi
                ;;
            -i | --info) JOB_TYPE=" ${JOB_TYPE} INFO " && INFO="true" ;;
            -k | --kill)
                JOB_TYPE=" ${JOB_TYPE} KILL " && KILL="true"
                [ "${2}" = all ] && JOB="KILL_ALL" && shift
                ;;
            -l | --logs) JOB_TYPE=" ${JOB_TYPE} SHOW_LOGS " && SHOW_LOGS="true" ;;
            -t | --time)
                _check_longoptions "${1}" "${2}"
                if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then
                    case "${2}" in
                        default*) UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config" ;;
                    esac
                    TO_SLEEP="${2##default=/}" && shift
                else
                    printf "%s\n" "-t/--time only takes positive integers as arguments, min = 1, max = infinity."
                    exit 1
                fi
                ;;
            -a | --arguments)
                _check_longoptions "${1}" "${2}"
                case "${2}" in
                    default*) UPDATE_DEFAULT_ARGS="_update_config" ;;
                esac
                ARGS=" ${ARGS} ${2##default=} " && shift
                ;;
            -fg | --foreground) FOREGROUND="true" && SHOW_LOGS="true" ;;
            -in | --include)
                _check_longoptions "${1}" "${2}"
                INCLUDE_FILES="${INCLUDE_FILES} -e '${2}' " && shift
                ;;
            -ex | --exclude)
                _check_longoptions "${1}" "${2}"
                EXCLUDE_FILES="${EXCLUDE_FILES} -e '${2}' " && shift
                ;;
            -c | --command)
                _check_longoptions "${1}" "${2}"
                CUSTOM_COMMAND_NAME="${2}" && shift
                ;;
            --sync-detail-dir)
                _check_longoptions "${1}" "${2}"
                SYNC_DETAIL_DIR="${2}" && shift
                ;;
            -s | --service)
                _check_longoptions "${1}" "${2}"
                SERVICE_NAME="${2}" && shift
                CREATE_SERVICE="true"
                ;;
            *)
                # Check if user meant it to be a flag
                case "${1}" in
                    -*) printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 ;;
                    *) # If no "-" is detected in 1st arg, it adds to input
                        FINAL_INPUT_ARRAY="${FINAL_INPUT_ARRAY}
                        ${1}"
                        ;;
                esac
                ;;
        esac
        shift
    done

    INFO_PATH="${HOME}/.google-drive-upload"
    CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath"
    [ -f "${CONFIG_INFO}" ] && . "${CONFIG_INFO}"
    CONFIG="${CONFIG:-${HOME}/.googledrive.conf}"
    SYNC_DETAIL_DIR="${SYNC_DETAIL_DIR:-${INFO_PATH}/sync}"
    SYNC_LIST="${SYNC_DETAIL_DIR}/sync_list"
    mkdir -p "${SYNC_DETAIL_DIR}" && printf "" >> "${SYNC_LIST}"

    _do_job

    [ -z "${FINAL_INPUT_ARRAY}" ] && _short_help

    return 0
}

###################################################
# Grab config variables and modify defaults if necessary
# Globals: 5 variables, 2 functions
# Variables - INFO_PATH, UPDATE_DEFAULT_CONFIG, DEFAULT_ARGS
# UPDATE_DEFAULT_ARGS, UPDATE_DEFAULT_TIME_TO_SLEEP, TIME_TO_SLEEP
# Functions - _print_center, _update_config
# Arguments: None
# Result: grab COMMAND_NAME, INSTALL_PATH, and CONFIG
# source CONFIG, update default values if required
###################################################
_config_variables() {
    COMMAND_NAME="${CUSTOM_COMMAND_NAME:-${COMMAND_NAME}}"
    VALUES_LIST="REPO COMMAND_NAME SYNC_COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE"
    VALUES_REGEX="" && for i in ${VALUES_LIST}; do
        VALUES_REGEX="${VALUES_REGEX:+${VALUES_REGEX}|}^${i}=\".*\".* # added values"
    done

    # Check if command exist, not necessary but just in case.
    {
        COMMAND_PATH="$(command -v "${COMMAND_NAME}")" 1> /dev/null &&
            SCRIPT_VALUES="$(grep -E "${VALUES_REGEX}|^SELF_SOURCE=\".*\"" "${COMMAND_PATH}" || :)" && eval "${SCRIPT_VALUES}" &&
            [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ] && unset SOURCED_GUPLOAD
    } || { printf "Error: %s is not installed, use -c/--command to specify.\n" "${COMMAND_NAME}" 1>&2 && exit 1; }

    ARGS=" ${ARGS} -q "
    SYNC_TIME_TO_SLEEP="3"
    # Config file is created automatically after first run
    # shellcheck source=/dev/null
    [ -r "${CONFIG}" ] && . "${CONFIG}"

    SYNC_TIME_TO_SLEEP="${TO_SLEEP:-${SYNC_TIME_TO_SLEEP}}"
    ARGS=" ${ARGS} ${SYNC_DEFAULT_ARGS:-} "
    "${UPDATE_DEFAULT_ARGS:-:}" SYNC_DEFAULT_ARGS " ${ARGS} " "${CONFIG}"
    "${UPDATE_DEFAULT_TIME_TO_SLEEP:-:}" SYNC_TIME_TO_SLEEP "${SYNC_TIME_TO_SLEEP}" "${CONFIG}"
    return 0
}

###################################################
# Print systemd service file contents
# Globals: 5 variables
# Variables - LOGNAME, INSTALL_PATH, COMMAND_NAME, SYNC_COMMAND_NAME, ALL_ARGUMNETS
# Arguments: None
###################################################
_systemd_service_contents() {
    username_systemd_service_contents="${LOGNAME:?Give username}" install_path_systemd_service_contents="${INSTALL_PATH:?Missing install path}"
    cmd_systemd_service_contents="${COMMAND_NAME:?Missing command name}" sync_cmd_systemd_service_contents="${SYNC_COMMAND_NAME:?Missing gsync cmd name}"
    all_argumnets_systemd_service_contents="${ALL_ARGUMNETS:-}"

    printf "%s\n" '# Systemd service file - start
[Unit]
Description=google-drive-upload synchronisation service
After=network.target

[Service]
Type=simple
User='"${username_systemd_service_contents}"'
Restart=on-abort
RestartSec=3
ExecStart="'"${install_path_systemd_service_contents}/${sync_cmd_systemd_service_contents}"'" --foreground --command "'"${install_path_systemd_service_contents}/${cmd_systemd_service_contents}"'" --sync-detail-dir "/tmp/sync" '"${all_argumnets_systemd_service_contents}"'

# Security
PrivateTmp=true
ProtectSystem=full
NoNewPrivileges=true
ProtectControlGroups=true
ProtectKernelModules=true
ProtectKernelTunables=true
PrivateDevices=true
RestrictAddressFamilies=AF_INET AF_INET6 AF_NETLINK
RestrictNamespaces=true
RestrictRealtime=true
SystemCallArchitectures=native

[Install]
WantedBy=multi-user.target
# Systemd service file - end'
}

###################################################
# Create systemd service wrapper script for managing the service
# Globals: None
#
# Arguments: 3
# ${1} = Service name
# ${2} = Service file contents
# ${3} = Script name
# Result: print the script contents to script file
###################################################
# NOTE(review): restored from a newline-mangled git-diff fragment; line breaks
# were re-derived from the ' -' diff markers. The embedded single-quoted script
# below is written verbatim to ${3}; the '"'…'"' splices inject outer variables.
_systemd_service_script() {
    name_systemd_service_script="${1:?Missing service name}" script_systemd_service_script=""
    service_file_contents_systemd_service_script="${2:?Missing service file contents}" script_name_systemd_service_script="${3:?Missing script name}"

    # shellcheck disable=SC2016
    script_systemd_service_script='#!/usr/bin/env sh
set -e

_usage() {
    printf "%b" "# Service name: '"'${name_systemd_service_script}'"'

# Print the systemd service file contents
sh \"${0##*/}\" print\n
# Add service to systemd files ( this must be run before doing any of the below )
sh \"${0##*/}\" add\n
# Start or Stop the service
sh \"${0##*/}\" start / stop\n
# Enable or Disable as a boot service:
sh \"${0##*/}\" enable / disable\n
# See logs
sh \"${0##*/}\" logs\n
# Remove the service from system
sh \"${0##*/}\" remove\n\n"

    _status
    exit 0
}

_status() {
    status_status="" current_status_status=""
    status_status="$(systemctl status '"'${name_systemd_service_script}'"' 2>&1 || :)"
    current_status_status="$(printf "%s\n" "${status_status}" | env grep -E "●.*|(Loaded|Active|Main PID|Tasks|Memory|CPU): .*" || :)"

    printf "%s\n" "Current status of service: ${current_status_status:-${status_status}}"
    return 0
}

unset TMPFILE

[ $# = 0 ] && _usage

CONTENTS='"'${service_file_contents_systemd_service_script}'"'

_add_service() {
    service_file_path_add_service="/etc/systemd/system/'"${name_systemd_service_script}"'.service"
    printf "%s\n" "Service file path: ${service_file_path_add_service}"
    if [ -f "${service_file_path_add_service}" ]; then
        printf "%s\n" "Service file already exists. Overwriting"
        sudo mv "${service_file_path_add_service}" "${service_file_path_add_service}.bak" || exit 1
        printf "%s\n" "Existing service file was backed up."
        printf "%s\n" "Old service file: ${service_file_path_add_service}.bak"
    else
        [ -z "${TMPFILE}" ] && {
            { { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } ||
                TMPFILE="$(pwd)/.$(_t="$(date +"%s")" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1
        }
        export TMPFILE
        trap "exit" INT TERM
        _rm_tmpfile() { rm -f "${TMPFILE:?}" ; }
        trap "_rm_tmpfile" EXIT
        trap "" TSTP # ignore ctrl + z

        { printf "%s\n" "${CONTENTS}" >|"${TMPFILE}" && sudo cp "${TMPFILE}" /etc/systemd/system/'"${name_systemd_service_script}"'.service; } ||
            { printf "%s\n" "Error: Failed to add service file to system." && exit 1 ;}
    fi
    sudo systemctl daemon-reload || printf "%s\n" "Could not reload the systemd daemon."
    printf "%s\n" "Service file was successfully added."
    return 0
}

_service() {
    service_name_service='"'${name_systemd_service_script}'"' action_service="${1:?}" service_file_path_service=""
    service_file_path_service="/etc/systemd/system/${service_name_service}.service"
    printf "%s\n" "Service file path: ${service_file_path_service}"
    [ -f "${service_file_path_service}" ] || { printf "%s\n" "Service file does not exist." && exit 1; }
    sudo systemctl daemon-reload || exit 1
    case "${action_service}" in
        log*) sudo journalctl -u "${service_name_service}" -f ;;
        rm | remove)
            sudo systemctl stop "${service_name_service}" || :
            if sudo rm -f /etc/systemd/system/"${service_name_service}".service; then
                sudo systemctl daemon-reload || :
                printf "%s\n" "Service removed." && return 0
            else
                printf "%s\n" "Error: Cannot remove." && exit 1
            fi
            ;;
        *)
            success_service="${2:?}" error_service="${3:-}"
            if sudo systemctl "${action_service}" "${service_name_service}"; then
                printf "%s\n" "Success: ${service_name_service} ${success_service}." && return 0
            else
                printf "%s\n" "Error: Cannot ${action_service} ${service_name_service} ${error_service}." && exit 1
            fi
            ;;
    esac
    return 0
}

while [ "${#}" -gt 0 ]; do
    case "${1}" in
        print) printf "%s\n" "${CONTENTS}" ;;
        add) _add_service ;;
        start) _service start started ;;
        stop) _service stop stopped ;;
        enable) _service enable "boot service enabled" "boot service" ;;
        disable) _service disable "boot service disabled" "boot service" ;;
        logs) _service logs ;;
        remove) _service rm ;;
        *) printf "%s\n" "Error: No valid options provided." && _usage ;;
    esac
    shift
done'
    printf "%s\n" "${script_systemd_service_script}" >| "${script_name_systemd_service_script}"
    return 0
}

###################################################
# Process all the values in "${FINAL_INPUT_ARRAY[@]}"
# Globals: 20 variables, 15 functions
# Variables - FINAL_INPUT_ARRAY ( array ), DEFAULT_ACCOUNT, ROOT_FOLDER_NAME, GDRIVE_FOLDER
# PID_FILE, SHOW_LOGS, LOGS, KILL, INFO, CREATE_SERVICE, ARGS, SERVICE_NAME
# Functions - _set_value, _systemd_service_script, _systemd_service_contents, _print_center, _check_existing_loop, _start_new_loop
# Arguments: None
# Result: Start the sync jobs for given folders, if running already, don't start new.
# If a pid is detected but not running, remove that job.
# If service script is going to be created then don,t touch the jobs
###################################################
_process_arguments() {
    unset status_process_arguments_process_arguments current_folder_process_arguments_process_arguments Aseen
    while read -r INPUT <&4 && { [ -n "${INPUT}" ] || continue; } &&
        case "${Aseen}" in
            *"|:_//_:|${INPUT}|:_//_:|"*) continue ;;
            *) Aseen="${Aseen}|:_//_:|${INPUT}|:_//_:|" ;;
        esac do
        ! [ -d "${INPUT}" ] && printf "\nError: Invalid Input ( %s ), no such directory.\n" "${INPUT}" && continue
        current_folder_process_arguments="$(pwd)"
        FOLDER="$(cd "${INPUT}" && pwd)" || exit 1
        [ -n "${DEFAULT_ACCOUNT}" ] && _set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${DEFAULT_ACCOUNT}_ROOT_FOLDER_NAME"
        GDRIVE_FOLDER="${GDRIVE_FOLDER:-${ROOT_FOLDER_NAME:-Unknown}}"

        # service mode: write a management script instead of starting a job
        [ -n "${CREATE_SERVICE}" ] && {
            ALL_ARGUMNETS="\"${FOLDER}\" ${TO_SLEEP:+-t \"${TO_SLEEP}\"} -a \"${ARGS}\""
            num_process_arguments="${num_process_arguments+$(printf "%s\n" $((num_process_arguments + 1)))}"
            service_name_process_arguments="gsync-${SERVICE_NAME}${num_process_arguments:+_${num_process_arguments}}"
            script_name_process_arguments="${service_name_process_arguments}.service.sh"
            _systemd_service_script "${service_name_process_arguments}" "$(_systemd_service_contents)" "${script_name_process_arguments}"

            _print_center "normal" "=" "="
            sh "${script_name_process_arguments}"
            _print_center "normal" "=" "="
            continue
        }

        cd "${FOLDER}" || exit 1
        _check_existing_loop
        case "${RETURN_STATUS}" in
            0 | 2) _start_new_loop ;;
            1)
                printf "%b\n" "Job is already running.."
                if [ -n "${INFO}" ]; then
                    _get_job_info "${PID}" more "PID: ${PID}|:_//_:|${FOLDER}|:_//_:|${GDRIVE_FOLDER}"
                else
                    printf "%b\n" "Local Folder: ${INPUT}\nDrive Folder: ${GDRIVE_FOLDER}"
                    printf "%s\n" "PID: ${PID}"
                fi

                [ -n "${KILL}" ] && _kill_job "${PID}" && exit
                [ -n "${SHOW_LOGS}" ] && tail -f "${LOGS}"
                ;;
        esac
        cd "${current_folder_process_arguments}" || exit 1
    done 4<< EOF
$(printf "%s\n" "${FINAL_INPUT_ARRAY}")
EOF
    return 0
}

main() {
    [ $# = 0 ] && _short_help

    set -o errexit -o noclobber

    if [ -z "${SELF_SOURCE}" ]; then
        UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" && SOURCE_UTILS=". '${UTILS_FOLDER}/common-utils.sh'"
        eval "${SOURCE_UTILS}" || { printf "Error: Unable to source util files.\n" && exit 1; }
    fi

    trap '' TSTP # ignore ctrl + z

    _setup_arguments "${@}"
    _check_debug
    _config_variables
    _process_arguments
}

main "${@}"

# NOTE(review): a second deleted file ( sh/release/gupload ) begins here in the
# underlying git diff; the patch-metadata lines below are not shell code.
diff --git a/sh/release/gupload b/sh/release/gupload
deleted file mode 100755
index 71b9116..0000000
--- a/sh/release/gupload
+++ /dev/null
@@ -1,2254 +0,0 @@
#!/usr/bin/env sh
SELF_SOURCE="true"
# Functions that will used in core script
# posix functions

###################################################
# Convert bytes to human readable form
# Globals: None
# Required Arguments: 1
# ${1} = Positive integer ( bytes )
# Result: Print human readable form.
# Reference:
# https://unix.stackexchange.com/a/538015
###################################################
_bytes_to_human() {
    b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0
    d_bytes_to_human='' type_bytes_to_human=''
    while [ "${b_bytes_to_human}" -gt 1024 ]; do
        d_bytes_to_human="$(printf ".%02d" $((b_bytes_to_human % 1024 * 100 / 1024)))"
        b_bytes_to_human=$((b_bytes_to_human / 1024)) && s_bytes_to_human=$((s_bytes_to_human += 1))
    done
    # NOTE(review): unit list orders YB before ZB — conventional SI order is ... EB ZB YB; verify upstream.
    j=0 && for i in B KB MB GB TB PB EB YB ZB; do
        j="$((j += 1))" && [ "$((j - 1))" = "${s_bytes_to_human}" ] && type_bytes_to_human="${i}" && break
        continue
    done
    printf "%s\n" "${b_bytes_to_human}${d_bytes_to_human} ${type_bytes_to_human}"
}

###################################################
# Check if debug is enabled and enable command trace
# Globals: 2 variables, 1 function
# Varibles - DEBUG, QUIET
# Function - _is_terminal
# Arguments: None
# Result: If DEBUG
# Present - Enable command trace and change print functions to avoid spamming.
# Absent - Disable command trace
# Check QUIET, then check terminal size and enable print functions accordingly.
###################################################
# NOTE(review): restored from a newline-mangled git-diff fragment; line breaks
# were re-derived from the ' -' diff markers. This function conditionally
# redefines _print_center/_clear_line/_newline, so their behavior depends on
# whether it has run.
_check_debug() {
    if [ -n "${DEBUG}" ]; then
        set -x && PS4='-> '
        _print_center() { { [ $# = 3 ] && printf "%s\n" "${2}"; } || { printf "%s%s\n" "${2}" "${3}"; }; }
        _clear_line() { :; } && _newline() { :; }
    else
        if [ -z "${QUIET}" ]; then
            # check if running in terminal and support ansi escape sequences
            if _support_ansi_escapes; then
                ! COLUMNS="$(_get_columns_size)" || [ "${COLUMNS:-0}" -lt 45 ] 2>| /dev/null &&
                    _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; }
                export CURL_PROGRESS="-#" EXTRA_LOG="_print_center" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true"
            else
                _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; }
                _clear_line() { :; }
            fi
            _newline() { printf "%b" "${1}"; }
        else
            _print_center() { :; } && _clear_line() { :; } && _newline() { :; }
        fi
        set +x
    fi
}

###################################################
# Check internet connection.
# Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs.
# Globals: 3 functions
# _print_center, _clear_line, _timeout
# Arguments: None
# Result: On
# Success - Nothing
# Error - print message and exit 1
###################################################
_check_internet() {
    "${EXTRA_LOG}" "justify" "Checking Internet Connection.." "-"
    if ! _timeout 10 curl -Is google.com --compressed; then
        _clear_line 1
        "${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "="
        return 1
    fi
    _clear_line 1
}

###################################################
# Move cursor to nth no. of line and clear it to the begining.
# Globals: None
# Arguments: 1
# ${1} = Positive integer ( line number )
# Result: Read description
###################################################
_clear_line() {
    # move the cursor up ${1} rows, then erase that whole row
    printf "\033[%sA\033[2K" "${1}"
}

###################################################
# Alternative to dirname command
# Globals: None
# Arguments: 1
# ${1} = path of file or folder
# Result: read description
# Reference:
# https://github.com/dylanaraps/pure-sh-bible#file-paths
###################################################
_dirname() {
    path_dirname="${1:-.}"
    # drop any trailing slashes
    path_dirname="${path_dirname%%"${path_dirname##*[!/]}"}"
    # no slash anywhere means the parent is the current directory
    if [ "${path_dirname##*/*}" ]; then
        path_dirname=.
    fi
    # cut the last component, then drop trailing slashes again
    path_dirname="${path_dirname%/*}"
    path_dirname="${path_dirname%%"${path_dirname##*[!/]}"}"
    # empty result means the parent is the filesystem root
    printf '%s\n' "${path_dirname:-/}"
}

###################################################
# Convert given time in seconds to readable form
# 110 to 1 minute(s) and 50 seconds
# Globals: None
# Arguments: 1
# ${1} = Positive Integer ( time in seconds )
# Result: read description
# Reference:
# https://stackoverflow.com/a/32164707
###################################################
_display_time() {
    total_display_time="${1}"
    days_display_time="$((total_display_time / 86400))"
    hours_display_time="$((total_display_time / 3600 % 24))"
    mins_display_time="$((total_display_time / 60 % 60))"
    secs_display_time="$((total_display_time % 60))"
    if [ "${days_display_time}" -gt 0 ]; then printf '%d days ' "${days_display_time}"; fi
    if [ "${hours_display_time}" -gt 0 ]; then printf '%d hrs ' "${hours_display_time}"; fi
    if [ "${mins_display_time}" -gt 0 ]; then printf '%d minute(s) ' "${mins_display_time}"; fi
    # print the joining word only when something larger than seconds was shown
    if [ "$((days_display_time + hours_display_time + mins_display_time))" -gt 0 ]; then printf 'and '; fi
    printf '%d seconds\n' "${secs_display_time}"
}

###################################################
# print column size
# use zsh or stty or tput
###################################################
_get_columns_size() {
    # try bash, zsh, stty, tput in that order; fail only when all are missing
    { command -v bash 1>| /dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } ||
        { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } ||
        { command -v stty 1>| /dev/null && size_get_columns_size="$(stty size)" && printf "%s\n" "${size_get_columns_size##* }"; } ||
        { command -v tput 1>| /dev/null && tput cols; } ||
        return 1
}

###################################################
# Fetch latest commit sha of release or branch
# Do not use github rest api because rate limit error occurs
# Globals: None
# Arguments: 3
# ${1} = "branch" or "release"
# ${2} = branch name or release name
# ${3} = repo name e.g labbots/google-drive-upload
# Result: print fetched sha
###################################################
_get_latest_sha() {
    unset sha_get_latest_sha page_get_latest_sha
    case "${1:-${TYPE}}" in
        branch)
            # scrape the first commit id from the branch atom feed
            sha_get_latest_sha="$(
                page_get_latest_sha="$(curl --compressed -s https://github.com/"${3:-${REPO}}"/commits/"${2:-${TYPE_VALUE}}".atom -r 0-2000)"
                match_get_latest_sha="$(printf "%s\n" "${page_get_latest_sha}" | grep -o "Commit\\/.*<" -m1 || :)" && match_get_latest_sha="${match_get_latest_sha##*\/}" && printf "%s\n" "${match_get_latest_sha%%<*}"
            )"
            ;;
        release)
            # scrape the first commit link from the release page
            sha_get_latest_sha="$(
                page_get_latest_sha="$(curl -L --compressed -s https://github.com/"${3:-${REPO}}"/releases/"${2:-${TYPE_VALUE}}")"
                match_get_latest_sha="$(printf "%s\n" "${page_get_latest_sha}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && match_get_latest_sha="${match_get_latest_sha##*commit\/}" && printf "%s\n" "${match_get_latest_sha%%\"*}"
            )"
            ;;
    esac
    printf "%b" "${sha_get_latest_sha:+${sha_get_latest_sha}\n}"
}

###################################################
# Encode the given string to parse properly as json
# Globals: None
# Arguments: 2
# ${1} = json or something else
# ${2} = input
# Result: if ${1} is j, then escape all chars, else only special chars
# Reference:
# https://tools.ietf.org/html/rfc7159#section-7
###################################################
_json_escape() {
    mode_json_escape="${1:?Missing mode}" str_json_escape="${2:?Provide Input}" out_json_escape=""
    # just for refrence "s|'|\'|g"
    case "${mode_json_escape}" in
        j)
            # full escape: backslash, slash, double quote, tab, CR, FF
            out_json_escape="$(printf "%s" "${str_json_escape}" | sed \
                -e "s|\\\|\\\\\\\|g" \
                -e "s|\/|\\\/|g" \
                -e 's/\"/\\\"/g' \
                -e "s/$(printf '\t')/\\t/g" \
                -e "s/$(printf '\r')/\\r/g" \
                -e "s/$(printf '\f')/\\f/g")"
            ;;
        *)
            # whitespace-control escape only
            out_json_escape="$(printf "%s" "${str_json_escape}" | sed \
                -e "s/$(printf '\t')/\\t/g" \
                -e "s/$(printf '\r')/\\r/g" \
                -e "s/$(printf '\f')/\\f/g")"
            ;;
    esac
    # use awk because sed just messes up with newlines
    out_json_escape="$(printf "%s" "${out_json_escape}" | awk '{printf "%s%s",sep,$0; sep="\\n"} END{print ""}')"
    printf "%s" "${out_json_escape}"
}

###################################################
# Method to extract specified field data from json
# Globals: None
# Arguments: 2
# ${1} - value of field to fetch from json
# ${2} - Optional, no of lines to parse for the given field in 1st arg
# ${3} - Optional, nth number of value from extracted values, default it 1.
# Input: file | pipe
# _json_value "Arguments" < file
# echo something | _json_value "Arguments"
# Result: print extracted value
###################################################
_json_value() {
    { [ "${2}" -gt 0 ] 2>| /dev/null && lines_json_value="${2}"; } || :
    { [ "${3}" -gt 0 ] 2>| /dev/null && pos_json_value="${3}"; } || { ! [ "${3}" = all ] && pos_json_value=1; }
    # grab every '"field": …' occurrence, optionally capped at ${2} matches
    # shellcheck disable=SC2086
    grepped_json_value="$(grep -o "\"${1}\"\:.*" ${lines_json_value:+-m} ${lines_json_value})" || return 1
    # strip the key, surrounding quotes and trailing commas, then pick the nth hit
    printf "%s\n" "${grepped_json_value}" | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${pos_json_value}"p || :
    return 0
}

###################################################
# Print a text to center interactively and fill the rest of the line with text specified.
# This function is fine-tuned to this script functionality, so may appear unusual.
# Globals: 1 variable
# COLUMNS
# Arguments: 4
# If ${1} = normal
# ${2} = text to print
# ${3} = symbol
# If ${1} = justify
# If remaining arguments = 2
# ${2} = text to print
# ${3} = symbol
# If remaining arguments = 3
# ${2}, ${3} = text to print
# ${4} = symbol
# Result: read description
# Reference:
# https://gist.github.com/TrinityCoder/911059c83e5f7a351b785921cf7ecda
###################################################
# NOTE(review): this whole span was restored from a newline-mangled git-diff
# fragment; line breaks were re-derived from the ' -' diff markers.
_print_center() {
    [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
    term_cols_print_center="${COLUMNS}"
    type_print_center="${1}" filler_print_center=""
    case "${type_print_center}" in
        normal) out_print_center="${2}" && symbol_print_center="${3}" ;;
        justify)
            if [ $# = 3 ]; then
                # single text: truncate to columns-5 and wrap in "[ .. ]"
                input1_print_center="${2}" symbol_print_center="${3}" to_print_print_center="" out_print_center=""
                to_print_print_center="$((term_cols_print_center - 5))"
                { [ "${#input1_print_center}" -gt "${to_print_print_center}" ] && out_print_center="[ $(printf "%.${to_print_print_center}s\n" "${input1_print_center}")..]"; } ||
                    { out_print_center="[ ${input1_print_center} ]"; }
            else
                # two texts: give them ~47% and ~46% of the width respectively
                input1_print_center="${2}" input2_print_center="${3}" symbol_print_center="${4}" to_print_print_center="" temp_print_center="" out_print_center=""
                to_print_print_center="$((term_cols_print_center * 47 / 100))"
                { [ "${#input1_print_center}" -gt "${to_print_print_center}" ] && temp_print_center=" $(printf "%.${to_print_print_center}s\n" "${input1_print_center}").."; } ||
                    { temp_print_center=" ${input1_print_center}"; }
                to_print_print_center="$((term_cols_print_center * 46 / 100))"
                { [ "${#input2_print_center}" -gt "${to_print_print_center}" ] && temp_print_center="${temp_print_center}$(printf "%.${to_print_print_center}s\n" "${input2_print_center}").. "; } ||
                    { temp_print_center="${temp_print_center}${input2_print_center} "; }
                out_print_center="[${temp_print_center}]"
            fi
            ;;
        *) return 1 ;;
    esac

    str_len_print_center="${#out_print_center}"
    # too wide to pad: print as-is
    [ "${str_len_print_center}" -ge "$((term_cols_print_center - 1))" ] && {
        printf "%s\n" "${out_print_center}" && return 0
    }

    filler_print_center_len="$(((term_cols_print_center - str_len_print_center) / 2))"

    i_print_center=1 && while [ "${i_print_center}" -le "${filler_print_center_len}" ]; do
        filler_print_center="${filler_print_center}${symbol_print_center}" && i_print_center="$((i_print_center + 1))"
    done

    printf "%s%s%s" "${filler_print_center}" "${out_print_center}" "${filler_print_center}"
    # odd leftover column gets one extra symbol
    [ "$(((term_cols_print_center - str_len_print_center) % 2))" -ne 0 ] && printf "%s" "${symbol_print_center}"
    printf "\n"

    return 0
}

###################################################
# Quiet version of _print_center
###################################################
_print_center_quiet() {
    { [ $# = 3 ] && printf "%s\n" "${2}"; } || printf "%s%s\n" "${2}" "${3}"
}

###################################################
# Evaluates value1=value2
# Globals: None
# Arguments: 3
# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode )
# ${2} = var name
# ${3} = var value
# Result: export value1=value2
###################################################
_set_value() {
    case "${1:?}" in
        d | direct) export "${2:?}=${3}" ;;
        i | indirect) export "${2:?}=$(eval printf "%s" \"\$"${3}"\")" ;;
    esac
}

###################################################
# Check if script terminal supports ansi escapes
# Globals: 1 variable
# TERM
# Arguments: None
# Result: return 1 or 0
###################################################
_support_ansi_escapes() {
    unset ansi_escapes
    case "${TERM}" in
        xterm* | rxvt* | urxvt* | linux* | vt* | screen*) ansi_escapes="true" ;;
    esac
    # also require stderr to be a terminal
    { [ -t 2 ] && [ -n "${ansi_escapes}" ] && return 0; } || return 1
}

###################################################
# Alternative to timeout command
# Globals: None
# Arguments: 1 and rest
# ${1} = amount of time to sleep
# rest = command to execute
# Result: Read description
# Reference:
# https://stackoverflow.com/a/24416732
###################################################
_timeout() {
    timeout_timeout="${1:?Error: Specify Timeout}" && shift
    {
        "${@}" &
        child="${!}"
        trap -- "" TERM
        {
            sleep "${timeout_timeout}"
            kill -9 "${child}"
        } &
        wait "${child}"
    } 2>| /dev/null 1>&2
}

###################################################
# Config updater
# Incase of old value, update, for new value add.
# Globals: None
# Arguments: 3
# ${1} = value name
# ${2} = value
# ${3} = config path
# Result: read description
###################################################
_update_config() {
    [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
    value_name_update_config="${1}" value_update_config="${2}" config_path_update_config="${3}"
    ! [ -f "${config_path_update_config}" ] && : >| "${config_path_update_config}" # If config file doesn't exist.
    chmod u+w "${config_path_update_config}" || return 1
    printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name_update_config}=" "${config_path_update_config}" || :)" \
        "${value_name_update_config}=\"${value_update_config}\"" >| "${config_path_update_config}" || return 1
    chmod a-w-r-x,u+r "${config_path_update_config}" || return 1
    return 0
}

###################################################
# Encode the given string to parse properly in network requests
# Globals: None
# Arguments: 1
# ${1} = string
# Result: print encoded string
# Reference:
# https://stackoverflow.com/a/41405682
###################################################
_url_encode() (
    LC_ALL=C LANG=C
    # NOTE(review): the bracket expression looks malformed ( '[[:alnum:]]_.!~*\47()-]' );
    # the usual form is '[[:alnum:]_.!~*\47()-]' — verify which characters are
    # meant to stay unencoded before changing it.
    awk 'BEGIN {while (y++ < 125) z[sprintf("%c", y)] = y
    while (y = substr(ARGV[1], ++j, 1))
    q = y ~ /[[:alnum:]]_.!~*\47()-]/ ?
    q y : q sprintf("%%%02X", z[y])
    print q}' "${1}"
)
# auth utils for Google Drive
# shellcheck source=/dev/null

###################################################
# Check if account name is valid by a regex expression
# Globals: None
# Arguments: 1
# ${1} = Account name
# Result: read description and return 1 or 0
###################################################
_account_name_valid() {
    name_account_name_valid="${1:?}" account_name_regex_account_name_valid='^([A-Za-z0-9_])+$'
    printf "%s\n" "${name_account_name_valid}" | grep -qE "${account_name_regex_account_name_valid}" || return 1
    return 0
}

###################################################
# Check if account exists
# First check if the given account is in correct format
# Globals: 2 functions
# _set_value, _account_name_valid
# Arguments: 1
# ${1} = Account name
# Result: read description and return 1 or 0
###################################################
_account_exists() {
    name_account_exists="${1:-}" client_id_account_exists="" client_secret_account_exists="" refresh_token_account_exists=""
    _account_name_valid "${name_account_exists}" || return 1
    _set_value indirect client_id_account_exists "ACCOUNT_${name_account_exists}_CLIENT_ID"
    _set_value indirect client_secret_account_exists "ACCOUNT_${name_account_exists}_CLIENT_SECRET"
    _set_value indirect refresh_token_account_exists "ACCOUNT_${name_account_exists}_REFRESH_TOKEN"
    # all three credentials must be non-empty for the account to count as configured
    [ -z "${client_id_account_exists:+${client_secret_account_exists:+${refresh_token_account_exists}}}" ] && return 1
    return 0
}

###################################################
# Show all accounts configured in config file
# Globals: 2 variables, 4 functions
# Variable - CONFIG, QUIET
# Functions - _account_exists, _set_value, _print_center, _reload_config
# Arguments: None
# Result: SHOW all accounts, export COUNT and ACC_${count}_ACC dynamic variables
# or print "No accounts configured yet."
-################################################### -_all_accounts() { - { _reload_config && _handle_old_config; } || return 1 - COUNT=0 - while read -r account <&4 && [ -n "${account}" ]; do - _account_exists "${account}" && - { [ "${COUNT}" = 0 ] && "${QUIET:-_print_center}" "normal" " All available accounts. " "=" || :; } && - printf "%b" "$((COUNT += 1)). ${account} \n" && _set_value direct "ACC_${COUNT}_ACC" "${account}" - done 4<< EOF -$(grep -oE '^ACCOUNT_.*_CLIENT_ID' "${CONFIG}" | sed -e "s/ACCOUNT_//g" -e "s/_CLIENT_ID//g") -EOF - { [ "${COUNT}" -le 0 ] && "${QUIET:-_print_center}" "normal" " No accounts configured yet. " "=" 1>&2; } || printf '\n' - return 0 -} - -################################################### -# Setup a new account name -# If given account name is configured already, then ask for name -# after name has been properly setup, export ACCOUNT_NAME var -# Globals: 1 variable, 5 functions -# Variable - QUIET -# Functions - _print_center, _account_exists, _clear_line, _account_name_valid, _reload_config -# Arguments: 1 -# ${1} = Account name ( optional ) -# Result: read description and export ACCOUNT_NAME NEW_ACCOUNT_NAME -################################################### -_set_new_account_name() { - _reload_config || return 1 - new_account_name_set_new_account_name="${1:-}" && unset name_valid_set_new_account_name - [ -z "${new_account_name_set_new_account_name}" ] && { - _all_accounts 2>| /dev/null - "${QUIET:-_print_center}" "normal" " New account name: " "=" - "${QUIET:-_print_center}" "normal" "Info: Account names can only contain alphabets / numbers / dashes." 
" " && printf '\n' - } - until [ -n "${name_valid_set_new_account_name}" ]; do - if [ -n "${new_account_name_set_new_account_name}" ]; then - if _account_name_valid "${new_account_name_set_new_account_name}"; then - if _account_exists "${new_account_name_set_new_account_name}"; then - "${QUIET:-_print_center}" "normal" " Warning: Given account ( ${new_account_name_set_new_account_name} ) already exists, input different name. " "-" 1>&2 - unset new_account_name_set_new_account_name && continue - else - export new_account_name_set_new_account_name="${new_account_name_set_new_account_name}" ACCOUNT_NAME="${new_account_name_set_new_account_name}" && - name_valid_set_new_account_name="true" && continue - fi - else - "${QUIET:-_print_center}" "normal" " Warning: Given account name ( ${new_account_name_set_new_account_name} ) invalid, input different name. " "-" - unset new_account_name_set_new_account_name && continue - fi - else - [ -t 1 ] || { "${QUIET:-_print_center}" "normal" " Error: Not running in an interactive terminal, cannot ask for new account name. 
" 1>&2 && return 1; } - printf -- "-> \033[?7l" - read -r new_account_name_set_new_account_name - printf '\033[?7h' - fi - _clear_line 1 - done - "${QUIET:-_print_center}" "normal" " Given account name: ${NEW_ACCOUNT_NAME} " "=" - export ACCOUNT_NAME="${NEW_ACCOUNT_NAME}" - return 0 -} - -################################################### -# Delete a account from config file -# Globals: 2 variables, 3 functions -# Variables - CONFIG, QUIET -# Functions - _account_exists, _print_center, _reload_config -# Arguments: None -# Result: check if account exists and delete from config, else print error message -################################################### -_delete_account() { - { _reload_config && _handle_old_config; } || return 1 - account_delete_account="${1:?Error: give account name}" && unset regex_delete_account config_without_values_delete_account - if _account_exists "${account_delete_account}"; then - regex_delete_account="^ACCOUNT_${account_delete_account}_(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)|DEFAULT_ACCOUNT=\"${account_delete_account}\"" - config_without_values_delete_account="$(grep -vE "${regex_delete_account}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit - printf "%s\n" "${config_without_values_delete_account}" >| "${CONFIG}" || return 1 - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms - "${QUIET:-_print_center}" "normal" " Successfully deleted account ( ${account_delete_account} ) from config. " "-" - else - "${QUIET:-_print_center}" "normal" " Error: Cannot delete account ( ${account_delete_account} ) from config. 
No such account exists " "-" 1>&2 - fi - return 0 -} - -################################################### -# handle legacy config -# this will be triggered only if old config values are present, convert to new format -# new account will be created with "default" name, if default already taken, then add a number as suffix -# Globals: 7 variables, 2 functions -# Variables - CLIENT_ID CLIENT_SECRET, REFRESH_TOKEN, ROOT_FOLDER, ROOT_FOLDER_NAME CONFIG, ACCOUNT_NAME -# Functions - _account_exists, _reload_config -# Arguments: None -################################################### -_handle_old_config() { - export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN # to handle a shellcheck warning - # only try to convert the if all three values are present - [ -n "${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}}" ] && { - account_name_handle_old_config="default" regex_check_handle_old_config config_without_values_handle_old_config count_handle_old_config - # first try to name the new account as default, otherwise try to add numbers as suffix - until ! 
_account_exists "${account_name_handle_old_config}"; do - account_name_handle_old_config="${account_name_handle_old_config}$((count_handle_old_config += 1))" - done - regex_check_handle_old_config="^(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)" - config_without_values_handle_old_config="$(grep -vE "${regex_check_handle_old_config}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit - printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ - "ACCOUNT_${account_name_handle_old_config}_CLIENT_ID=\"${CLIENT_ID}\"" \ - "ACCOUNT_${account_name_handle_old_config}_CLIENT_SECRET=\"${CLIENT_SECRET}\"" \ - "ACCOUNT_${account_name_handle_old_config}_REFRESH_TOKEN=\"${REFRESH_TOKEN}\"" \ - "ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER=\"${ROOT_FOLDER}\"" \ - "ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER_NAME=\"${ROOT_FOLDER_NAME}\"" \ - "${config_without_values_handle_old_config}" >| "${CONFIG}" || return 1 - - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms - - _reload_config || return 1 # reload config file - } - return 0 -} - -################################################### -# handle old config values, new account creation, custom account name, updating default config and account -# start token service if applicable -# Globals: 12 variables, 7 functions -# Variables - DEFAULT_CONFIG, NEW_ACCOUNT_NAME, CUSTOM_ACCOUNT_NAME, DELETE_ACCOUNT_NAME, LIST_ACCOUNTS, QUIET -# UPDATE_DEFAULT_ACCOUNT, UPDATE_DEFAULT_CONFIG, CONFIG_INFO, CONTINUE_WITH_NO_INPUT -# Functions - _reload_config, _handle_old_config, _set_new_account_name, _account_exists, _all_accounts -# _check_account_credentials, _token_bg_service, _print_center, _update_config, _set_value -# Arguments: None -# Result: read description and start access token check in bg if required -################################################### -_check_credentials() { - { _reload_config && _handle_old_config; } || return 1 - # set 
account name to default account name - ACCOUNT_NAME="${DEFAULT_ACCOUNT}" - # if old values exist in config - - if [ -n "${NEW_ACCOUNT_NAME}" ]; then - # create new account, --create-account flag - _set_new_account_name "${NEW_ACCOUNT_NAME}" || return 1 - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - else - if [ -n "${CUSTOM_ACCOUNT_NAME}" ]; then - if _account_exists "${CUSTOM_ACCOUNT_NAME}"; then - ACCOUNT_NAME="${CUSTOM_ACCOUNT_NAME}" - else - # error out in case CUSTOM_ACCOUNT_NAME is invalid - "${QUIET:-_print_center}" "normal" " Error: No such account ( ${CUSTOM_ACCOUNT_NAME} ) exists. " "-" && return 1 - fi - elif [ -n "${DEFAULT_ACCOUNT}" ]; then - # check if default account if valid or not, else set account name to nothing and remove default account in config - _account_exists "${DEFAULT_ACCOUNT}" || { - _update_config DEFAULT_ACCOUNT "" "${CONFIG}" && unset DEFAULT_ACCOUNT ACCOUNT_NAME && UPDATE_DEFAULT_ACCOUNT="_update_config" - } - # UPDATE_DEFAULT_ACCOUNT to true so that default config is updated later - else - UPDATE_DEFAULT_ACCOUNT="_update_config" # as default account doesn't exist - fi - - # in case no account name was set - if [ -z "${ACCOUNT_NAME}" ]; then - # if accounts are configured but default account is not set - if _all_accounts 2>| /dev/null && [ "${COUNT}" -gt 0 ]; then - # when only 1 account is configured, then set it as default - if [ "${COUNT}" -eq 1 ]; then - _set_value indirect ACCOUNT_NAME "ACC_1_ACC" # ACC_1_ACC comes from _all_accounts function - else - "${QUIET:-_print_center}" "normal" " Above accounts are configured, but default one not set. 
" "=" - if [ -t 1 ]; then - "${QUIET:-_print_center}" "normal" " Choose default account: " "-" - until [ -n "${ACCOUNT_NAME}" ]; do - printf -- "-> \033[?7l" - read -r account_name_check_credentials - printf '\033[?7h' - if [ "${account_name_check_credentials}" -gt 0 ] && [ "${account_name_check_credentials}" -le "${COUNT}" ]; then - _set_value indirect ACCOUNT_NAME "ACC_${COUNT}_ACC" - else - _clear_line 1 - fi - done - else - # if not running in a terminal then choose 1st one as default - printf "%s\n" "Warning: Script is not running in a terminal, choosing first account as default." - _set_value indirect ACCOUNT_NAME "ACC_1_ACC" # ACC_1_ACC comes from _all_accounts function - fi - fi - else - _set_new_account_name "" || return 1 - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - fi - fi - _check_account_credentials "${ACCOUNT_NAME}" || return 1 - fi - - "${UPDATE_DEFAULT_ACCOUNT:-:}" DEFAULT_ACCOUNT "${ACCOUNT_NAME}" "${CONFIG}" # update default account if required - "${UPDATE_DEFAULT_CONFIG:-:}" CONFIG "${CONFIG}" "${CONFIG_INFO}" # update default config if required - - [ -n "${CONTINUE_WITH_NO_INPUT}" ] || _token_bg_service # launch token bg service - return 0 -} - -################################################### -# check credentials for a given account name -# Globals: 3 functions -# Functions - _check_client, _check_refresh_token, _check_access_token -# Arguments: 2 -# ${1} = Account name -# Result: read description, return 1 or 0 -################################################### -_check_account_credentials() { - account_name_check_account_credentials="${1:?Give account name}" - { - _check_client ID "${account_name_check_account_credentials}" && - _check_client SECRET "${account_name_check_account_credentials}" && - _check_refresh_token "${account_name_check_account_credentials}" && - _check_access_token "${account_name_check_account_credentials}" check - } || return 1 - return 0 -} - -################################################### -# 
Check client id or secret and ask if required -# Globals: 4 variables, 3 functions -# Variables - CONFIG, QUIET, CLIENT_ID_${ACCOUNT_NAME}, CLIENT_SECRET_${ACCOUNT_NAME} -# Functions - _print_center, _update_config, _set_value -# Arguments: 2 -# ${1} = ID or SECRET -# ${2} = Account name ( optional - if not given, then just CLIENT_[ID|SECRET] var is used ) -# Result: read description and export ACCOUNT_name_CLIENT_[ID|SECRET] CLIENT_[ID|SECRET] -################################################### -_check_client() { - type_check_client="CLIENT_${1:?Error: ID or SECRET}" account_name_check_client="${2:-}" - type_value_check_client="" type_regex_check_client="" && - unset type_name_check_client valid_check_client client_check_client message_check_client - export client_id_regex='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' client_secret_regex='[0-9A-Za-z_-]+' - type_name_check_client="${account_name_check_client:+ACCOUNT_${account_name_check_client}_}${type_check_client}" - - # set the type_value to the actual value of ACCOUNT_${account_name}_[ID|SECRET] - _set_value indirect type_value_check_client "${type_name_check_client}" - # set the type_regex to the actual value of client_id_regex or client_secret_regex - _set_value indirect type_regex_check_client "${type_check_client}_regex" - - until [ -n "${type_value_check_client}" ] && [ -n "${valid_check_client}" ]; do - [ -n "${type_value_check_client}" ] && { - if printf "%s\n" "${type_value_check_client}" | grep -qE "${type_regex_check_client}"; then - [ -n "${client_check_client}" ] && { _update_config "${type_name_check_client}" "${type_value_check_client}" "${CONFIG}" || return 1; } - valid_check_client="true" && continue - else - { [ -n "${client_check_client}" ] && message_check_client="- Try again"; } || message_check_client="in config ( ${CONFIG} )" - "${QUIET:-_print_center}" "normal" " Invalid Client ${1} ${message_check_client} " "-" && unset "${type_name_check_client}" client - fi - } - [ -z 
"${client_check_client}" ] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter Client ${1} " "-" - [ -n "${client_check_client}" ] && _clear_line 1 - printf -- "-> " - read -r "${type_name_check_client?}" && client_check_client=1 - _set_value indirect type_value_check_client "${type_name_check_client}" - done - - # export ACCOUNT_name_CLIENT_[ID|SECRET] - _set_value direct "${type_name_check_client}" "${type_value_check_client}" - # export CLIENT_[ID|SECRET] - _set_value direct "${type_check_client}" "${type_value_check_client}" - - return 0 -} - -################################################### -# Check refresh token and ask if required -# Globals: 8 variables, 4 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REDIRECT_URI, TOKEN_URL, CONFIG, QUIET -# Functions - _set_value, _print_center, _update_config, _check_access_token -# Arguments: 1 -# ${1} = Account name ( optional - if not given, then just REFRESH_TOKEN var is used ) -# Result: read description & export REFRESH_TOKEN ACCOUNT_${account_name}_REFRESH_TOKEN -################################################### -_check_refresh_token() { - # bail out before doing anything if client id and secret is not present, unlikely to happen but just in case - [ -z "${CLIENT_ID:+${CLIENT_SECRET}}" ] && return 1 - account_name_check_refresh_token="${1:-}" - refresh_token_regex='[0-9]//[0-9A-Za-z_-]+' authorization_code_regex='[0-9]/[0-9A-Za-z_-]+' - refresh_token_name_check_refresh_token="${account_name_check_refresh_token:+ACCOUNT_${account_name_check_refresh_token}_}REFRESH_TOKEN" - - _set_value indirect refresh_token_value_check_refresh_token "${refresh_token_name_check_refresh_token}" - - [ -n "${refresh_token_value_check_refresh_token}" ] && { - ! printf "%s\n" "${refresh_token_value_check_refresh_token}" | grep -qE "${refresh_token_regex}" && - "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. 
" "-" && unset refresh_token_value_check_refresh_token - } - - [ -z "${refresh_token_value_check_refresh_token}" ] && { - printf "\n" && "${QUIET:-_print_center}" "normal" "If you have a refresh token generated, then type the token, else leave blank and press return key.." " " - printf "\n" && "${QUIET:-_print_center}" "normal" " Refresh Token " "-" && printf -- "-> " - read -r refresh_token_value_check_refresh_token - if [ -n "${refresh_token_value_check_refresh_token}" ]; then - "${QUIET:-_print_center}" "normal" " Checking refresh token.. " "-" - if printf "%s\n" "${refresh_token_value_check_refresh_token}" | grep -qE "${refresh_token_regex}"; then - _set_value direct REFRESH_TOKEN "${refresh_token_value_check_refresh_token}" - { _check_access_token "${account_name_check_refresh_token}" skip_check && - _update_config "${refresh_token_name_check_refresh_token}" "${refresh_token_value_check_refresh_token}" "${CONFIG}" && - _clear_line 1; } || check_error_check_refresh_token=true - else - check_error_check_refresh_token=true - fi - [ -n "${check_error_check_refresh_token}" ] && "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token given, follow below steps to generate.. " "-" && unset refresh_token_value_check_refresh_token - else - "${QUIET:-_print_center}" "normal" " No Refresh token given, follow below steps to generate.. 
" "-" && unset refresh_token_value_check_refresh_token - fi - - [ -z "${refresh_token_value_check_refresh_token}" ] && { - printf "\n" && "${QUIET:-_print_center}" "normal" "Visit the below URL, tap on allow and then enter the code obtained" " " - URL="https://accounts.google.com/o/oauth2/auth?client_id=${CLIENT_ID}&redirect_uri=${REDIRECT_URI}&scope=${SCOPE}&response_type=code&prompt=consent" - printf "\n%s\n" "${URL}" - unset AUTHORIZATION_CODE authorization_code AUTHORIZATION_CODE_VALID response - until [ -n "${AUTHORIZATION_CODE}" ] && [ -n "${AUTHORIZATION_CODE_VALID}" ]; do - [ -n "${AUTHORIZATION_CODE}" ] && { - if printf "%s\n" "${AUTHORIZATION_CODE}" | grep -qE "${authorization_code_regex}"; then - AUTHORIZATION_CODE_VALID="true" && continue - else - "${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. " "-" && unset AUTHORIZATION_CODE authorization_code - fi - } - { [ -z "${authorization_code}" ] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter the authorization code " "-"; } || _clear_line 1 - printf -- "-> \033[?7l" - read -r AUTHORIZATION_CODE && authorization_code=1 - printf '\033[?7h' - done - response_check_refresh_token="$(curl --compressed "${CURL_PROGRESS}" -X POST \ - --data "code=${AUTHORIZATION_CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")" || : - _clear_line 1 1>&2 - - refresh_token_value_check_refresh_token="$(printf "%s\n" "${response_check_refresh_token}" | _json_value refresh_token 1 1)" || - { printf "%s\n" "Error: Cannot fetch refresh token, make sure the authorization code was correct." 
&& return 1; } - - _set_value direct REFRESH_TOKEN "${refresh_token_value_check_refresh_token}" - { _check_access_token "${account_name_check_refresh_token}" skip_check "${response_check_refresh_token}" && - _update_config "${refresh_token_name_check_refresh_token}" "${refresh_token_value_check_refresh_token}" "${CONFIG}"; } || return 1 - } - printf "\n" - } - - # export account_name_check_refresh_token_REFRESH_TOKEN - _set_value direct "${refresh_token_name_check_refresh_token}" "${refresh_token_value_check_refresh_token}" - # export REFRESH_TOKEN - _set_value direct REFRESH_TOKEN "${refresh_token_value_check_refresh_token}" - - return 0 -} - -################################################### -# Check access token and create/update if required -# Also update in config -# Globals: 9 variables, 3 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, TOKEN_URL, CONFIG, API_URL, API_VERSION, QUIET -# Functions - _print_center, _update_config, _set_value -# Arguments: 2 -# ${1} = Account name ( if not given, then just ACCESS_TOKEN var is used ) -# ${2} = if skip_check, then force create access token, else check with regex and expiry -# ${3} = json response ( optional ) -# Result: read description & export ACCESS_TOKEN ACCESS_TOKEN_EXPIRY -################################################### -_check_access_token() { - # bail out before doing anything if client id|secret or refresh token is not present, unlikely to happen but just in case - [ -z "${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}}" ] && return 1 - - account_name_check_access_token="${1:-}" no_check_check_access_token="${2:-false}" response_json_check_access_token="${3:-}" - unset token_name_check_access_token token_expiry_name_check_access_token token_value_check_access_token token_expiry_value_check_access_token response_check_access_token - access_token_regex='ya29\.[0-9A-Za-z_-]+' - 
token_name_check_access_token="${account_name_check_access_token:+ACCOUNT_${account_name_check_access_token}_}ACCESS_TOKEN" - token_expiry_name_check_access_token="${token_name_check_access_token}_EXPIRY" - - _set_value indirect token_value_check_access_token "${token_name_check_access_token}" - _set_value indirect token_expiry_value_check_access_token "${token_expiry_name_check_access_token}" - - [ "${no_check_check_access_token}" = skip_check ] || [ -z "${token_value_check_access_token}" ] || [ "${token_expiry_value_check_access_token:-0}" -lt "$(date +"%s")" ] || ! printf "%s\n" "${token_value_check_access_token}" | grep -qE "${access_token_regex}" && { - response_check_access_token="${response_json_check_access_token:-$(curl --compressed -s -X POST --data \ - "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || : - - if token_value_check_access_token="$(printf "%s\n" "${response_check_access_token}" | _json_value access_token 1 1)"; then - token_expiry_value_check_access_token="$(($(date +"%s") + $(printf "%s\n" "${response_check_access_token}" | _json_value expires_in 1 1) - 1))" - _update_config "${token_name_check_access_token}" "${token_value_check_access_token}" "${CONFIG}" || return 1 - _update_config "${token_expiry_name_check_access_token}" "${token_expiry_value_check_access_token}" "${CONFIG}" || return 1 - else - "${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." 
"=" 1>&2 - printf "%s\n" "${response_check_access_token}" 1>&2 - return 1 - fi - } - - # export ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY - _set_value direct ACCESS_TOKEN "${token_value_check_access_token}" - _set_value direct ACCESS_TOKEN_EXPIRY "${token_expiry_value_check_access_token}" - - # export INITIAL_ACCESS_TOKEN which is used on script cleanup - _set_value direct INITIAL_ACCESS_TOKEN "${ACCESS_TOKEN}" - return 0 -} - -################################################### -# load config file if available, else create a empty file -# uses global variable CONFIG -################################################### -_reload_config() { - { [ -r "${CONFIG}" ] && . "${CONFIG}"; } || { printf "" >> "${CONFIG}" || return 1; } - return 0 -} - -################################################### -# launch a background service to check access token and update it -# checks ACCESS_TOKEN_EXPIRY, try to update before 5 mins of expiry, a fresh token gets 60 mins -# process will be killed when script exits or "${MAIN_PID}" is killed -# Globals: 4 variables, 1 function -# Variables - ACCESS_TOKEN, ACCESS_TOKEN_EXPIRY, MAIN_PID, TMPFILE -# Functions - _check_access_token -# Arguments: None -# Result: read description & export ACCESS_TOKEN_SERVICE_PID -################################################### -_token_bg_service() { - [ -z "${MAIN_PID}" ] && return 0 # don't start if MAIN_PID is empty - printf "%b\n" "ACCESS_TOKEN=\"${ACCESS_TOKEN}\"\nACCESS_TOKEN_EXPIRY=\"${ACCESS_TOKEN_EXPIRY}\"" >| "${TMPFILE}_ACCESS_TOKEN" - { - until ! kill -0 "${MAIN_PID}" 2>| /dev/null 1>&2; do - . 
"${TMPFILE}_ACCESS_TOKEN" - CURRENT_TIME="$(date +"%s")" - REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))" - if [ "${REMAINING_TOKEN_TIME}" -le 300 ]; then - # timeout after 30 seconds, it shouldn't take too long anyway, and update tmp config - CONFIG="${TMPFILE}_ACCESS_TOKEN" _timeout 30 _check_access_token "" skip_check || : - else - TOKEN_PROCESS_TIME_TO_SLEEP="$(if [ "${REMAINING_TOKEN_TIME}" -le 301 ]; then - printf "0\n" - else - printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))" - fi)" - sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}" - fi - sleep 1 - done - } & - export ACCESS_TOKEN_SERVICE_PID="${!}" - return 0 -} - -################################################### -# Search for an existing file on gdrive with write permission. -# Globals: 3 variables, 2 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value -# Arguments: 4 -# ${1} = file name -# ${2} = root dir id of file -# ${3} = mode ( size or md5Checksum or empty ) -# ${4} = if mode = empty, then not required -# mode = size, then size -# mode = md5Checksum, then md5sum -# Result: print search response if id fetched -# check size and md5sum if mode size or md5Checksum -# Reference: -# https://developers.google.com/drive/api/v3/search-files -################################################### -_check_existing_file() ( - [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 - name_check_existing_file="${1}" rootdir_check_existing_file="${2}" mode_check_existing_file="${3}" param_value_check_existing_file="${4}" - unset query_check_existing_file response_check_existing_file id_check_existing_file - - "${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." 
"-" 1>&2 - query_check_existing_file="$(_url_encode "name=\"${name_check_existing_file}\" and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")" - - response_check_existing_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query_check_existing_file}&fields=files(id,name,mimeType${mode_check_existing_file:+,${mode_check_existing_file}})&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - printf "%s\n" "${response_check_existing_file}" | _json_value id 1 1 2>| /dev/null 1>&2 || return 1 - - [ -n "${mode_check_existing_file}" ] && { - [ "$(printf "%s\n" "${response_check_existing_file}" | _json_value "${mode_check_existing_file}" 1 1)" = "${param_value_check_existing_file}" ] || return 1 - } - - printf "%s\n" "${response_check_existing_file}" - return 0 -) - -################################################### -# Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 6 functions -# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line, _json_escape -# Arguments: 5 -# ${1} = update or upload ( upload type ) -# ${2} = file id to upload -# ${3} = root dir id for file -# ${4} = name of file -# ${5} = size of file -# ${6} = md5sum of file -# Result: On -# Success - Upload/Update file and export FILE_ID -# Error - return 1 -# Reference: -# https://developers.google.com/drive/api/v2/reference/files/copy -################################################### -_clone_file() { - [ $# -lt 5 ] && printf "Missing arguments\n" && return 1 - job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" name_clone_file="${4}" size_clone_file="${5}" md5_clone_file="${6}" - unset post_data_clone_file response_clone_file readable_size_clone_file 
description_clone_file && STRING="Cloned" - readable_size_clone_file="$(printf "%s\n" "${size_clone_file}" | _bytes_to_human)" - escaped_name_clone_file="$(_json_escape j "${name_clone_file}")" print_name_clone_file="$(_json_escape p "${name_clone_file}")" - - # create description data - [ -n "${DESCRIPTION_FILE}" ] && { - description_clone_file="$(printf "%s\n" "${DESCRIPTION_FILE}" | sed -e "s|%f|${name_clone_file}|g|" -e "s|%f|${readable_size_clone_file}|g|")" - description_clone_file="$(_json_escape j "${description_clone_file}")" # escape for json - } - - post_data_clone_file="{\"parents\": [\"${file_root_id_clone_file}\"]${description_clone_file:+,\"description\":\"${description_clone_file}\"}}" - - _print_center "justify" "${print_name_clone_file} " "| ${readable_size_clone_file}" "=" - - if [ "${job_clone_file}" = update ]; then - unset file_check_json_clone_file check_value_type_clone_file check_value_clone_file - case "${CHECK_MODE}" in - 2) check_value_type_clone_file="size" check_value_clone_file="${size_clone_file}" ;; - 3) check_value_type_clone_file="md5Checksum" check_value_clone_file="${md5_clone_file}" ;; - esac - # Check if file actually exists. - if file_check_json_clone_file="$(_check_existing_file "${escaped_name_clone_file}" "${file_root_id_clone_file}" "${check_value_type_clone_file}" "${check_value_clone_file}")"; then - if [ -n "${SKIP_DUPLICATES}" ]; then - _collect_file_info "${file_check_json_clone_file}" "${print_name_clone_file}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_name_clone_file}" " already exists." "=" && return 0 - else - _print_center "justify" "Overwriting file.." 
"-" - { _file_id_clone_file="$(printf "%s\n" "${file_check_json_clone_file}" | _json_value id 1 1)" && - post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare")"; } || - { _error_logging_upload "${print_name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" || return 1; } - if [ "${_file_id_clone_file}" != "${file_id_clone_file}" ]; then - _api_request -s \ - -X DELETE \ - "${API_URL}/drive/${API_VERSION}/files/${_file_id_clone_file}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || : - STRING="Updated" - else - _collect_file_info "${file_check_json_clone_file}" "${print_name_clone_file}" || return 1 - fi - fi - else - _print_center "justify" "Cloning file.." "-" - fi - else - _print_center "justify" "Cloning file.." "-" - fi - - # shellcheck disable=SC2086 - response_clone_file="$(_api_request ${CURL_PROGRESS} \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${post_data_clone_file}" \ - "${API_URL}/drive/${API_VERSION}/files/${file_id_clone_file}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" - for _ in 1 2 3; do _clear_line 1; done - _collect_file_info "${response_clone_file}" "${print_name_clone_file}" || return 1 - "${QUIET:-_print_center}" "justify" "${print_name_clone_file} " "| ${readable_size_clone_file} | ${STRING}" "=" - return 0 -} - -################################################### -# Create/Check directory in google drive. 
-# Globals: 3 variables, 3 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _json_escape -# Arguments: 2 -# ${1} = dir name -# ${2} = root dir id of given dir -# Result: print folder id -# Reference: -# https://developers.google.com/drive/api/v3/folder -################################################### -_create_directory() { - [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 - dirname_create_directory="${1##*/}" rootdir_create_directory="${2}" - unset query_create_directory search_response_create_directory folder_id_create_directory - escaped_dirname_create_directory="$(_json_escape j "${dirname_create_directory}")" - print_dirname_create_directory="$(_json_escape p "${dirname_create_directory}")" - - "${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${print_dirname_create_directory}" "-" 1>&2 - query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name=\"${escaped_dirname_create_directory}\" and trashed=false and '${rootdir_create_directory}' in parents")" - - search_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files?q=${query_create_directory}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - - if ! 
folder_id_create_directory="$(printf "%s\n" "${search_response_create_directory}" | _json_value id 1 1)"; then - unset create_folder_post_data_create_directory create_folder_response_create_directory - create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${escaped_dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}" - create_folder_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${create_folder_post_data_create_directory}" \ - "${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - fi - _clear_line 1 1>&2 - - { folder_id_create_directory="${folder_id_create_directory:-$(printf "%s\n" "${create_folder_response_create_directory}" | _json_value id 1 1)}" && printf "%s\n" "${folder_id_create_directory}"; } || - { printf "%s\n" "${create_folder_response_create_directory}" 1>&2 && return 1; } - return 0 -} - -################################################### -# Get information for a gdrive folder/file. -# Globals: 3 variables, 1 function -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _json_value -# Arguments: 2 -# ${1} = folder/file gdrive id -# ${2} = information to fetch, e.g name, id -# Result: On -# Success - print fetched value -# Error - print "message" field from the json -# Reference: -# https://developers.google.com/drive/api/v3/search-files -################################################### -_drive_info() { - [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 - folder_id_drive_info="${1}" fetch_drive_info="${2}" - unset search_response_drive_info - - "${EXTRA_LOG}" "justify" "Fetching info.." 
"-" 1>&2 - search_response_drive_info="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - "${API_URL}/drive/${API_VERSION}/files/${folder_id_drive_info}?fields=${fetch_drive_info}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - printf "%b" "${search_response_drive_info:+${search_response_drive_info}\n}" - return 0 -} - -################################################### -# Extract ID from a googledrive folder/file url. -# Globals: None -# Arguments: 1 -# ${1} = googledrive folder/file url. -# Result: print extracted ID -################################################### -_extract_id() { - [ $# = 0 ] && printf "Missing arguments\n" && return 1 - LC_ALL=C id_extract_id="${1}" - case "${id_extract_id}" in - *'drive.google.com'*'id='*) _tmp="${id_extract_id##*id=}" && _tmp="${_tmp%%\?*}" && id_extract_id="${_tmp%%\&*}" ;; - *'drive.google.com'*'file/d/'* | 'http'*'docs.google.com'*'/d/'*) _tmp="${id_extract_id##*\/d\/}" && _tmp="${_tmp%%\/*}" && _tmp="${_tmp%%\?*}" && id_extract_id="${_tmp%%\&*}" ;; - *'drive.google.com'*'drive'*'folders'*) _tmp="${id_extract_id##*\/folders\/}" && _tmp="${_tmp%%\?*}" && id_extract_id="${_tmp%%\&*}" ;; - esac - printf "%b" "${id_extract_id:+${id_extract_id}\n}" -} - -################################################### -# Upload ( Create/Update ) files on gdrive. -# Interrupted uploads can be resumed. 
-# Globals: 8 variables, 11 functions -# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _json_escape, _print_center, _bytes_to_human, _check_existing_file -# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session -# _full_upload, _collect_file_info -# Arguments: 3 -# ${1} = update or upload ( upload type ) -# ${2} = file to upload -# ${3} = root dir id for file -# Result: On -# Success - Upload/Update file and export FILE_ID -# Error - return 1 -# Reference: -# https://developers.google.com/drive/api/v3/create-file -# https://developers.google.com/drive/api/v3/manage-uploads -# https://developers.google.com/drive/api/v3/reference/files/update -################################################### -_upload_file() { - [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - job_upload_file="${1}" input_upload_file="${2}" folder_id_upload_file="${3}" - unset slug_upload_file inputname_upload_file extension_upload_file inputsize_upload_file readable_size_upload_file request_method_upload_file \ - url_upload_file postdata_upload_file uploadlink_upload_file upload_body_upload_file mime_type_upload_file description_upload_file \ - resume_args1_upload_file resume_args2_upload_file resume_args3_upload_file - - slug_upload_file="${input_upload_file##*/}" - escaped_slug_upload_file="$(_json_escape j "${slug_upload_file}")" print_slug_upload_file="$(_json_escape p "${slug_upload_file}")" - inputname_upload_file="${slug_upload_file%.*}" - extension_upload_file="${slug_upload_file##*.}" - inputsize_upload_file="$(($(wc -c < "${input_upload_file}")))" && content_length_upload_file="${inputsize_upload_file}" - readable_size_upload_file="$(printf "%s\n" "${inputsize_upload_file}" | _bytes_to_human)" - - # Handle extension-less files - [ "${inputname_upload_file}" = "${extension_upload_file}" ] && { - 
mime_type_upload_file="$(file --brief --mime-type "${input_upload_file}" || mimetype --output-format %m "${input_upload_file}")" 2>| /dev/null || { - "${QUIET:-_print_center}" "justify" "Error: file or mimetype command not found." "=" && printf "\n" - exit 1 - } - } - - # create description data - [ -n "${DESCRIPTION_FILE}" ] && { - description_upload_file="$(printf "%s\n" "${DESCRIPTION_FILE}" | sed -e "s|%f|${slug_upload_file}|g" -e "s|%f|${readable_size_upload_file}|g" -e "s|%m|${mime_type_upload_file}|g")" - description_upload_file="$(_json_escape j "${description_upload_file}")" # escape for json - } - - _print_center "justify" "${print_slug_upload_file}" " | ${readable_size_upload_file}" "=" - - # Set proper variables for overwriting files - [ "${job_upload_file}" = update ] && { - unset file_check_json_upload_file check_value_upload_file - case "${CHECK_MODE}" in - 2) check_value_type_upload_file="size" check_value_upload_file="${inputsize_upload_file}" ;; - 3) - check_value_type_upload_file="md5Checksum" - check_value_upload_file="$(md5sum "${input_upload_file}")" || { - "${QUIET:-_print_center}" "justify" "Error: cannot calculate md5sum of given file." "=" 1>&2 - return 1 - } - check_value_upload_file="${check_value_upload_file%% *}" - ;; - esac - # Check if file actually exists, and create if not. - if file_check_json_upload_file="$(_check_existing_file "${escaped_slug_upload_file}" "${folder_id_upload_file}" "${check_value_type_upload_file}" "${check_value_upload_file}")"; then - if [ -n "${SKIP_DUPLICATES}" ]; then - # Stop upload if already exists ( -d/--skip-duplicates ) - _collect_file_info "${file_check_json_upload_file}" "${print_slug_upload_file}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_slug_upload_file} already exists." 
"=" && return 0 - else - request_method_upload_file="PATCH" - _file_id_upload_file="$(printf "%s\n" "${file_check_json_upload_file}" | _json_value id 1 1)" || - { _error_logging_upload "${print_slug_upload_file}" "${file_check_json_upload_file}" || return 1; } - url_upload_file="${API_URL}/upload/drive/${API_VERSION}/files/${_file_id_upload_file}?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" - # JSON post data to specify the file name and folder under while the file to be updated - postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${escaped_slug_upload_file}\",\"addParents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" - STRING="Updated" - fi - else - job_upload_file="create" - fi - } - - # Set proper variables for creating files - [ "${job_upload_file}" = create ] && { - url_upload_file="${API_URL}/upload/drive/${API_VERSION}/files?uploadType=resumable&supportsAllDrives=true&includeItemsFromAllDrives=true" - request_method_upload_file="POST" - # JSON post data to specify the file name and folder under while the file to be created - postdata_upload_file="{\"mimeType\": \"${mime_type_upload_file}\",\"name\": \"${escaped_slug_upload_file}\",\"parents\": [\"${folder_id_upload_file}\"]${description_upload_file:+,\"description\":\"${description_upload_file}\"}}" - STRING="Uploaded" - } - - __file_upload_file="${INFO_PATH}/${print_slug_upload_file}__::__${folder_id_upload_file}__::__${inputsize_upload_file}" - # https://developers.google.com/drive/api/v3/manage-uploads - if [ -r "${__file_upload_file}" ]; then - uploadlink_upload_file="$(cat "${__file_upload_file}" || :)" - http_code_upload_file="$(curl --compressed -s -X PUT "${uploadlink_upload_file}" -o /dev/null --write-out %"{http_code}")" || : - case "${http_code_upload_file}" in - 308) # Active Resumable URI give 308 status - uploaded_range_upload_file="$(raw_upload_file="$(curl --compressed -s 
-X PUT \ - -H "Content-Range: bytes */${content_length_upload_file}" \ - --url "${uploadlink_upload_file}" --globoff -D - || :)" && - printf "%s\n" "${raw_upload_file##*[R,r]ange: bytes=0-}" | while read -r line; do printf "%s\n" "${line%%$(printf '\r')}" && break; done)" - if [ "${uploaded_range_upload_file}" -gt 0 ] 2>| /dev/null; then - _print_center "justify" "Resuming interrupted upload.." "-" && _newline "\n" - content_range_upload_file="$(printf "bytes %s-%s/%s\n" "$((uploaded_range_upload_file + 1))" "$((inputsize_upload_file - 1))" "${inputsize_upload_file}")" - content_length_upload_file="$((inputsize_upload_file - $((uploaded_range_upload_file + 1))))" - # Resuming interrupted uploads needs http1.1 - resume_args1_upload_file='-s' resume_args2_upload_file='--http1.1' resume_args3_upload_file="Content-Range: ${content_range_upload_file}" - _upload_file_from_uri _clear_line - _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 - _normal_logging_upload - _remove_upload_session - else - _full_upload || return 1 - fi - ;; - 4[0-9][0-9] | 000) # Dead Resumable URI give 40* status - _full_upload || return 1 - ;; - 201 | 200) # Completed Resumable URI give 20* status - upload_body_upload_file="${http_code_upload_file}" - _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 - _normal_logging_upload - _remove_upload_session - ;; - esac - else - _full_upload || return 1 - fi - return 0 -} - -################################################### -# Sub functions for _upload_file function - Start -# generate resumable upload link -_generate_upload_link() { - "${EXTRA_LOG}" "justify" "Generating upload link.." 
"-" 1>&2 - uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X "${request_method_upload_file}" \ - -H "Content-Type: application/json; charset=UTF-8" \ - -H "X-Upload-Content-Type: ${mime_type_upload_file}" \ - -H "X-Upload-Content-Length: ${inputsize_upload_file}" \ - -d "$postdata_upload_file" \ - "${url_upload_file}" \ - -D - || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - case "${uploadlink_upload_file}" in - *'ocation: '*'upload_id'*) uploadlink_upload_file="$(printf "%s\n" "${uploadlink_upload_file##*[L,l]ocation: }" | while read -r line; do printf "%s\n" "${line%%$(printf '\r')}" && break; done)" && return 0 ;; - '' | *) return 1 ;; - esac - - return 0 -} - -# Curl command to push the file to google drive. -_upload_file_from_uri() { - _print_center "justify" "Uploading.." "-" - # shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. - upload_body_upload_file="$(_api_request ${CURL_PROGRESS} \ - -X PUT \ - -H "Content-Type: ${mime_type_upload_file}" \ - -H "Content-Length: ${content_length_upload_file}" \ - -H "Slug: ${print_slug_upload_file}" \ - -T "${input_upload_file}" \ - -o- \ - --url "${uploadlink_upload_file}" \ - --globoff \ - ${CURL_SPEED} ${resume_args1_upload_file} ${resume_args2_upload_file} \ - -H "${resume_args3_upload_file}" || :)" - [ -z "${VERBOSE_PROGRESS}" ] && for _ in 1 2; do _clear_line 1; done && "${1:-:}" - return 0 -} - -# logging in case of successful upload -_normal_logging_upload() { - [ -z "${VERBOSE_PROGRESS}" ] && _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug_upload_file} " "| ${readable_size_upload_file} | ${STRING}" "=" - return 0 -} - -# Tempfile Used for resuming interrupted uploads -_log_upload_session() { - [ "${inputsize_upload_file}" -gt 1000000 ] && printf "%s\n" "${uploadlink_upload_file}" >| "${__file_upload_file}" - return 0 -} - -# remove upload session -_remove_upload_session() { - rm -f 
"${__file_upload_file}" - return 0 -} - -# wrapper to fully upload a file from scratch -_full_upload() { - _generate_upload_link || { _error_logging_upload "${print_slug_upload_file}" "${uploadlink_upload_file}" || return 1; } - _log_upload_session - _upload_file_from_uri - _collect_file_info "${upload_body_upload_file}" "${print_slug_upload_file}" || return 1 - _normal_logging_upload - _remove_upload_session - return 0 -} -# Sub functions for _upload_file function - End -################################################### - -################################################### -# Share a gdrive file/folder -# Globals: 3 variables, 4 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _print_center, _clear_line -# Arguments: 2 -# ${1} = gdrive ID of folder/file -# ${2} = Email to which file will be shared ( optional ) -# Result: read description -# Reference: -# https://developers.google.com/drive/api/v3/manage-sharing -################################################### -_share_id() { - [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 - id_share_id="${1}" role_share_id="${2:?Missing role}" share_email_share_id="${3}" role_share_id="reader" type_share_id="${share_email_share_id:+user}" - unset post_data_share_id response_share_id - - "${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2 - post_data_share_id="{\"role\":\"${role_share_id}\",\"type\":\"${type_share_id:-anyone}\"${share_email_share_id:+,\"emailAddress\":\"${share_email_share_id}\"}}" - - response_share_id="$(_api_request "${CURL_PROGRESS_EXTRA}" \ - -X POST \ - -H "Content-Type: application/json; charset=UTF-8" \ - -d "${post_data_share_id}" \ - "${API_URL}/drive/${API_VERSION}/files/${id_share_id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2 - _clear_line 1 1>&2 - - { printf "%s\n" "${response_share_id}" | _json_value id 1 1 2>| /dev/null 1>&2 && return 0; } || - { printf "%s\n" "Error: Cannot Share." 
1>&2 && printf "%s\n" "${response_share_id}" 1>&2 && return 1; } -} -# shellcheck source=/dev/null - -################################################### -# A simple wrapper to check tempfile for access token and make authorized oauth requests to drive api -################################################### -_api_request() { - . "${TMPFILE}_ACCESS_TOKEN" - - curl --compressed \ - -H "Authorization: Bearer ${ACCESS_TOKEN}" \ - "${@}" -} - -################################################### -# Used in collecting file properties from output json after a file has been uploaded/cloned -# Also handles logging in log file if LOG_FILE_ID is set -# Globals: 1 variables, 2 functions -# Variables - LOG_FILE_ID -# Functions - _error_logging_upload, _json_value -# Arguments: 1 -# ${1} = output jsom -# Result: set fileid and link, save info to log file if required -################################################### -_collect_file_info() { - json_collect_file_info="${1}" info_collect_file_info="" - FILE_ID="$(printf "%s\n" "${json_collect_file_info}" | _json_value id 1 1)" || { _error_logging_upload "${2}" "${json_collect_file_info}" || return 1; } - { [ -z "${LOG_FILE_ID}" ] || [ -d "${LOG_FILE_ID}" ]; } && return 0 - info_collect_file_info="Link: https://drive.google.com/open?id=${FILE_ID} -Name: $(printf "%s\n" "${json_collect_file_info}" | _json_value name 1 1 || :) -ID: ${FILE_ID} -Type: $(printf "%s\n" "${json_collect_file_info}" | _json_value mimeType 1 1 || :)" - printf "%s\n\n" "${info_collect_file_info}" >> "${LOG_FILE_ID}" - return 0 -} - -################################################### -# Error logging wrapper -################################################### -_error_logging_upload() { - log_error_logging_upload="${2}" - "${QUIET:-_print_center}" "justify" "Upload ERROR" ", ${1:-} not ${STRING:-uploaded}." 
"=" 1>&2 - case "${log_error_logging_upload}" in - # https://github.com/rclone/rclone/issues/3857#issuecomment-573413789 - *'"message": "User rate limit exceeded."'*) - printf "%s\n\n%s\n" "${log_error_logging_upload}" \ - "Today's upload limit reached for this account. Use another account to upload or wait for tomorrow." 1>&2 - # Never retry if upload limit reached - export RETRY=0 - ;; - '' | *) printf "%s\n" "${log_error_logging_upload}" 1>&2 ;; - esac - printf "\n\n\n" 1>&2 - return 1 -} - -################################################### -# A small function to get rootdir id for files in sub folder uploads -# Globals: 1 variable, 1 function -# Variables - DIRIDS -# Functions - _dirname -# Arguments: 1 -# ${1} = filename -# Result: read discription -################################################### -_get_rootdir_id() { - file_gen_final_list="${1:?Error: give filename}" - rootdir_gen_final_list="$(_dirname "${file_gen_final_list}")" - temp_gen_final_list="$(printf "%s\n" "${DIRIDS:?Error: DIRIDS Missing}" | grep -F "|:_//_:|${rootdir_gen_final_list}|:_//_:|" || :)" - printf "%s\n" "${temp_gen_final_list%%"|:_//_:|${rootdir_gen_final_list}|:_//_:|"}" - return 0 -} - -################################################### -# A extra wrapper for _upload_file function to properly handle retries -# also handle uploads in case uploading from folder -# Globals: 3 variables, 1 function -# Variables - RETRY, UPLOAD_MODE and ACCESS_TOKEN -# Functions - _upload_file -# Arguments: 3 -# ${1} = parse or norparse -# ${2} = file path -# ${3} = if ${1} != parse; gdrive folder id to upload; fi -# Result: set SUCCESS var on success -################################################### -_upload_file_main() { - [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 - file_upload_file_main="${2}" sleep_upload_file_main=0 - { [ "${1}" = parse ] && dirid_upload_file_main="$(_get_rootdir_id "${file_upload_file_main}")"; } || dirid_upload_file_main="${3}" - - 
retry_upload_file_main="${RETRY:-0}" && unset RETURN_STATUS - until [ "${retry_upload_file_main}" -le 0 ] && [ -n "${RETURN_STATUS}" ]; do - if [ -n "${4}" ]; then - { _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break; } || RETURN_STATUS=2 - else - { _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" && RETURN_STATUS=1 && break; } || RETURN_STATUS=2 - fi - # decrease retry using -=, skip sleep if all retries done - [ "$((retry_upload_file_main -= 1))" -lt 1 ] && sleep "$((sleep_upload_file_main += 1))" - # on every retry, sleep the times of retry it is, e.g for 1st, sleep 1, for 2nd, sleep 2 - continue - done - [ -n "${4}" ] && { - { [ "${RETURN_STATUS}" = 1 ] && printf "%s\n" "${file_upload_file_main}"; } || printf "%s\n" "${file_upload_file_main}" 1>&2 - } - return 0 -} - -################################################### -# Upload all files in the given folder, parallelly or non-parallely and show progress -# Globals: 7 variables, 3 functions -# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET -# Functions - _clear_line, _newline, _print_center and _upload_file_main -# Arguments: 4 -# ${1} = parallel or normal -# ${2} = parse or norparse -# ${3} = filenames with full path -# ${4} = if ${2} != parse; then gdrive folder id to upload; fi -# Result: read discription, set SUCCESS_STATUS & ERROR_STATUS -################################################### -_upload_folder() { - [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - mode_upload_folder="${1}" PARSE_MODE="${2}" files_upload_folder="${3}" ID="${4:-}" && export PARSE_MODE ID - SUCCESS_STATUS=0 SUCCESS_FILES="" ERROR_STATUS=0 ERROR_FILES="" - case "${mode_upload_folder}" in - normal) - [ "${PARSE_MODE}" = parse ] && _clear_line 1 && _newline "\n" - - while read -r file <&4; do - _upload_file_main "${PARSE_MODE}" 
"${file}" "${ID}" - { [ "${RETURN_STATUS}" = 1 ] && : "$((SUCCESS_STATUS += 1))" && SUCCESS_FILES="$(printf "%b\n" "${SUCCESS_STATUS:+${SUCCESS_STATUS}\n}${file}")"; } || - { : "$((ERROR_STATUS += 1))" && ERROR_FILES="$(printf "%b\n" "${ERROR_STATUS:+${ERROR_STATUS}\n}${file}")"; } - if [ -n "${VERBOSE:-${VERBOSE_PROGRESS}}" ]; then - _print_center "justify" "Status: ${SUCCESS_STATUS} Uploaded" " | ${ERROR_STATUS} Failed" "=" && _newline "\n" - else - for _ in 1 2; do _clear_line 1; done - _print_center "justify" "Status: ${SUCCESS_STATUS} Uploaded" " | ${ERROR_STATUS} Failed" "=" - fi - done 4<< EOF -$(printf "%s\n" "${files_upload_folder}") -EOF - ;; - parallel) - NO_OF_PARALLEL_JOBS_FINAL="$((NO_OF_PARALLEL_JOBS > NO_OF_FILES ? NO_OF_FILES : NO_OF_PARALLEL_JOBS))" - [ -f "${TMPFILE}"SUCCESS ] && rm "${TMPFILE}"SUCCESS - [ -f "${TMPFILE}"ERROR ] && rm "${TMPFILE}"ERROR - - # shellcheck disable=SC2016 - (printf "%s\n" "${files_upload_folder}" | xargs -P"${NO_OF_PARALLEL_JOBS_FINAL}" -I "{}" -n 1 sh -c ' - eval "${SOURCE_UTILS}" - _upload_file_main "${PARSE_MODE}" "{}" "${ID}" true - ' 1>| "${TMPFILE}"SUCCESS 2>| "${TMPFILE}"ERROR) & - pid="${!}" - - until [ -f "${TMPFILE}"SUCCESS ] || [ -f "${TMPFILE}"ERORR ]; do sleep 0.5; done - [ "${PARSE_MODE}" = parse ] && _clear_line 1 - _newline "\n" - - until ! 
kill -0 "${pid}" 2>| /dev/null 1>&2; do - SUCCESS_STATUS="$(($(wc -l < "${TMPFILE}"SUCCESS)))" - ERROR_STATUS="$(($(wc -l < "${TMPFILE}"ERROR)))" - sleep 1 - [ "$((SUCCESS_STATUS + ERROR_STATUS))" != "${TOTAL}" ] && - _clear_line 1 && "${QUIET:-_print_center}" "justify" "Status" ": ${SUCCESS_STATUS} Uploaded | ${ERROR_STATUS} Failed" "=" - TOTAL="$((SUCCESS_STATUS + ERROR_STATUS))" - done - SUCCESS_STATUS="$(($(wc -l < "${TMPFILE}"SUCCESS)))" SUCCESS_FILES="$(cat "${TMPFILE}"SUCCESS)" - ERROR_STATUS="$(($(wc -l < "${TMPFILE}"ERROR)))" ERROR_FILES="$(cat "${TMPFILE}"ERROR)" - export SUCCESS_FILES ERROR_FILES - ;; - esac - return 0 -} -# Upload a file to Google Drive -# shellcheck source=/dev/null - -_usage() { - printf "%b" " -The script can be used to upload file/directory to google drive.\n -Usage:\n ${0##*/} [options.. ] \n -Foldername argument is optional. If not provided, the file will be uploaded to preconfigured google drive.\n -File name argument is optional if create directory option is used.\n -Options:\n - -a | --account 'account name' - Use different account than the default one.\n - To change the default account name, use this format, -a/--account default=account_name\n - -la | --list-accounts - Print all configured accounts in the config files.\n - -ca | --create-account 'account name' - To create a new account with the given name if does not already exists.\n - -da | --delete-account 'account name' - To delete an account information from config file. \n - -c | -C | --create-dir - option to create directory. Will provide folder id. Can be used to provide input folder, see README.\n - -r | --root-dir or - google folder ID/URL to which the file/directory is going to upload. 
- If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url\n - -s | --skip-subdirs - Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads.\n - -p | --parallel - Upload multiple files in parallel, Max value = 10.\n - -f | --[file|folder] - Specify files and folders explicitly in one command, use multiple times for multiple folder/files. See README for more use of this command.\n - -cl | --clone - Upload a gdrive file without downloading, require accessible gdrive link or id as argument.\n - -o | --overwrite - Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders.\n - -d | --skip-duplicates - Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders.\n - -cm | --check-mode - Additional flag for --overwrite and --skip-duplicates flag. Can be used to change check mode in those flags, available args are 'size' and 'md5'.\n - -desc | --description | --description-all - Specify description for the given file. 
To use the respective metadata of a file, below is the format:\n - File name ( fullname ): %f | Size: %s | Mime Type: %m\n - Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m'\n - Note: For files inside folders, use --description-all flag.\n - -S | --share - Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link.\n - -SM | -sm | --share-mode 'share mode' - Specify the share mode for sharing file.\n - Share modes are: r / reader - Read only permission.\n - : w / writer - Read and write permission.\n - : c / commenter - Comment only permission.\n - Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is neccessary.\n - --speed 'speed' - Limit the download speed, supported formats: 1K, 1M and 1G.\n - -i | --save-info - Save uploaded files info to the given filename.\n - -z | --config - Override default config file with custom config file.\nIf you want to change default value, then use this format -z/--config default=default=your_config_file_path.\n - -q | --quiet - Supress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders.\n - -R | --retry 'num of retries' - Retry the file upload if it fails, postive integer as argument. Currently only for file uploads.\n - -in | --include 'pattern' - Only include the files with the given pattern to upload - Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n - -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading. 
- Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --exclude "*1*", will exclude all the files pattern '1' in the name.\n - --hide - This flag will prevent the script to print sensitive information like root folder id and drivelink.\n - -v | --verbose - Display detailed message (only for non-parallel uploads).\n - -V | --verbose-progress - Display detailed message and detailed upload progress(only for non-parallel uploads).\n - --skip-internet-check - Do not check for internet connection, recommended to use in sync jobs.\n - $([ "${GUPLOAD_INSTALLED_WITH}" = script ] && printf '%s\n' '\n -u | --update - Update the installed script in your system.\n - -U | --uninstall - Uninstall script, remove related files.\n') - --info - Show detailed info, only if script is installed system wide.\n - -D | --debug - Display script command trace.\n - -h | --help - Display this message.\n" - exit 0 -} - -_short_help() { - printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit 0 -} - -################################################### -# Print the contents of info file if scipt is installed system wide. -# Path is INFO_FILE="${HOME}/.google-drive-upload/google-drive-upload.info" -# Globals: 1 variable -# INFO_FILE -# Arguments: None -# Result: read description -################################################### -_version_info() { - if command -v "${COMMAND_NAME}" 1> /dev/null && [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then - for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do - printf "%s\n" "${i}=\"$(eval printf "%s" \"\$"${i}"\")\"" - done | sed -e "s/=/: /g" - else - printf "%s\n" "google-drive-upload is not installed system wide." 
- fi - exit 0 -} - -################################################### -# Function to cleanup config file -# Remove invalid access tokens on the basis of corresponding expiry -# Globals: None -# Arguments: 1 -# ${1} = config file -# Result: read description -################################################### -_cleanup_config() { - config="${1:?Error: Missing config}" && unset values_regex _tmp - - ! [ -f "${config}" ] && return 0 - - while read -r line <&4 && [ -n "${line}" ]; do - expiry_value_name="${line%%=*}" - token_value_name="${expiry_value_name%%_EXPIRY}" - - _tmp="${line##*=}" && _tmp="${_tmp%\"}" && expiry="${_tmp#\"}" - [ "${expiry}" -le "$(date +"%s")" ] && - values_regex="${values_regex:+${values_regex}|}${expiry_value_name}=\".*\"|${token_value_name}=\".*\"" - done 4<< EOF -$(grep -F ACCESS_TOKEN_EXPIRY "${config}" || :) -EOF - - chmod u+w "${config}" && - printf "%s\n" "$(grep -Ev "^\$${values_regex:+|${values_regex}}" "${config}")" >| "${config}" && - chmod "a-w-r-x,u+r" "${config}" - return 0 -} - -################################################### -# Process all arguments given to the script -# Globals: 1 variable, 1 function -# Variable - HOME -# Functions - _short_help -# Arguments: Many -# ${@}" = Flags with argument and file/folder input -# Result: On -# Success - Set all the variables -# Error - Print error message and exit -# Reference: -# Email Regex - https://gist.github.com/guessi/82a73ee7eb2b1216eb9db17bb8d65dd1 -################################################### -_setup_arguments() { - [ $# = 0 ] && printf "Missing arguments\n" && return 1 - # Internal variables - # De-initialize if any variables set already. 
- unset LIST_ACCOUNTS UPDATE_DEFAULT_ACCOUNT CUSTOM_ACCOUNT_NAME NEW_ACCOUNT_NAME DELETE_ACCOUNT_NAME ACCOUNT_ONLY_RUN - unset FOLDERNAME FINAL_LOCAL_INPUT_ARRAY FINAL_ID_INPUT_ARRAY CONTINUE_WITH_NO_INPUT - unset PARALLEL NO_OF_PARALLEL_JOBS SHARE SHARE_EMAIL SHARE_ROLE OVERWRITE SKIP_DUPLICATES CHECK_MODE SKIP_SUBDIRS DESCRIPTION ROOTDIR QUIET - unset VERBOSE VERBOSE_PROGRESS DEBUG LOG_FILE_ID CURL_SPEED RETRY - export CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s" - INFO_PATH="${HOME}/.google-drive-upload" CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath" - [ -f "${CONFIG_INFO}" ] && . "${CONFIG_INFO}" - CONFIG="${CONFIG:-${HOME}/.googledrive.conf}" - - # Configuration variables # Remote gDrive variables - unset ROOT_FOLDER CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ACCESS_TOKEN - export API_URL="https://www.googleapis.com" - export API_VERSION="v3" \ - SCOPE="${API_URL}/auth/drive" \ - REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ - TOKEN_URL="https://accounts.google.com/o/oauth2/token" - - _check_config() { - [ -z "${1##default=*}" ] && export UPDATE_DEFAULT_CONFIG="_update_config" - { [ -r "${2}" ] && CONFIG="${2}"; } || { - printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1 - } - return 0 - } - - _check_longoptions() { - [ -z "${2}" ] && - printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && - exit 1 - return 0 - } - - while [ $# -gt 0 ]; do - case "${1}" in - -h | --help) _usage ;; - -D | --debug) DEBUG="true" && export DEBUG ;; - --info) _version_info ;; - -a | --account) - _check_longoptions "${1}" "${2}" - export CUSTOM_ACCOUNT_NAME="${2##default=}" && shift - [ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ACCOUNT="_update_config" - ;; - -la | --list-accounts) export LIST_ACCOUNTS="true" ;; - # this flag is preferred over --account - -ca | --create-account) - _check_longoptions "${1}" "${2}" - export NEW_ACCOUNT_NAME="${2}" && 
shift - ;; - -da | --delete-account) - _check_longoptions "${1}" "${2}" - export DELETE_ACCOUNT_NAME="${2}" && shift - ;; - -c | -C | --create-dir) - _check_longoptions "${1}" "${2}" - FOLDERNAME="${2}" && shift - ;; - -r | --root-dir) - _check_longoptions "${1}" "${2}" - ROOTDIR="${2##default=}" - [ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ROOTDIR="_update_config" - shift - ;; - -z | --config) - _check_longoptions "${1}" "${2}" - _check_config "${2}" "${2##default=}" - shift - ;; - -i | --save-info) - _check_longoptions "${1}" "${2}" - export LOG_FILE_ID="${2}" && shift - ;; - -s | --skip-subdirs) export SKIP_SUBDIRS="true" ;; - -p | --parallel) - _check_longoptions "${1}" "${2}" - NO_OF_PARALLEL_JOBS="${2}" - if [ "$((NO_OF_PARALLEL_JOBS))" -gt 0 ] 2>| /dev/null 1>&2; then - NO_OF_PARALLEL_JOBS="$((NO_OF_PARALLEL_JOBS > 10 ? 10 : NO_OF_PARALLEL_JOBS))" - else - printf "\nError: -p/--parallel value ranges between 1 to 10.\n" - exit 1 - fi - export PARALLEL_UPLOAD="parallel" && shift - ;; - -o | --overwrite) export OVERWRITE="Overwrite" UPLOAD_MODE="update" ;; - -d | --skip-duplicates) export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update" ;; - -cm | --check-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - size) export CHECK_MODE="2" && shift ;; - md5) export CHECK_MODE="3" && shift ;; - *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; - esac - ;; - -desc | --description | --description-all) - _check_longoptions "${1}" "${2}" - [ "${1}" = "--description-all" ] && export DESCRIPTION_ALL="true" - export DESCRIPTION="${2}" && shift - ;; - -f | --file | --folder) - _check_longoptions "${1}" "${2}" - LOCAL_INPUT_ARRAY="${LOCAL_INPUT_ARRAY} - ${2}" && shift - ;; - -cl | --clone) - _check_longoptions "${1}" "${2}" - FINAL_ID_INPUT_ARRAY="${FINAL_ID_INPUT_ARRAY} - $(_extract_id "${2}")" && shift - ;; - -S | --share) - SHARE="_share_id" - 
EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" - case "${2}" in - -* | '') : ;; - *) - if printf "%s\n" "${2}" | grep -qE "${EMAIL_REGEX}"; then - SHARE_EMAIL="${2}" && shift && export SHARE_EMAIL - fi - ;; - esac - SHARE_ROLE="${SHARE_ROLE:-reader}" - ;; - -[Ss][Mm] | --share-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - r | read*) SHARE_ROLE="reader" ;; - w | write*) SHARE_ROLE="writer" ;; - c | comment*) SHARE_ROLE="commenter" ;; - *) - printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." && - exit 1 - ;; - esac - SHARE="_share_id" - shift - ;; - --speed) - _check_longoptions "${1}" "${2}" - regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$' - if printf "%s\n" "${2}" | grep -qE "${regex}"; then - export CURL_SPEED="--limit-rate ${2}" && shift - else - printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2 - exit 1 - fi - ;; - -R | --retry) - _check_longoptions "${1}" "${2}" - if [ "$((2))" -gt 0 ] 2>| /dev/null 1>&2; then - export RETRY="${2}" && shift - else - printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n" - exit 1 - fi - ;; - -in | --include) - _check_longoptions "${1}" "${2}" - INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && shift - ;; - -ex | --exclude) - _check_longoptions "${1}" "${2}" - EXCLUDE_FILES="${EXCLUDE_FILES} ! 
-name '${2}' " && shift - ;; - --hide) HIDE_INFO=":" ;; - -q | --quiet) export QUIET="_print_center_quiet" ;; - -v | --verbose) export VERBOSE="true" ;; - -V | --verbose-progress) export VERBOSE_PROGRESS="true" ;; - --skip-internet-check) export SKIP_INTERNET_CHECK=":" ;; - '') shorthelp ;; - *) # Check if user meant it to be a flag - if [ -z "${1##-*}" ]; then - [ "${GUPLOAD_INSTALLED_WITH}" = script ] && { - case "${1}" in - -u | --update) - _check_debug && _update && { exit 0 || exit 1; } - ;; - --uninstall) - _check_debug && _update uninstall && { exit 0 || exit 1; } - ;; - esac - } - printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 - else - case "${1}" in - *drive.google.com* | *docs.google.com*) - FINAL_ID_INPUT_ARRAY="${FINAL_ID_INPUT_ARRAY} - $(_extract_id "${1}")" - ;; - *) - LOCAL_INPUT_ARRAY="${LOCAL_INPUT_ARRAY} - ${1}" - ;; - esac - fi - ;; - esac - shift - done - - _check_debug - - [ -n "${VERBOSE_PROGRESS}" ] && unset VERBOSE && export CURL_PROGRESS="" - [ -n "${QUIET}" ] && export CURL_PROGRESS="-s" - - # create info path folder, can be missing if gupload was not installed with install.sh - mkdir -p "${INFO_PATH}" || return 1 - - # handle account related flags here as we want to use the flags independenlty even with no normal valid inputs - # delete account, --delete-account flag - # TODO: add support for deleting multiple accounts - [ -n "${DELETE_ACCOUNT_NAME}" ] && _delete_account "${DELETE_ACCOUNT_NAME}" - # list all configured accounts, --list-accounts flag - [ -n "${LIST_ACCOUNTS}" ] && _all_accounts - - # If no input, then check if -C option was used or not. 
- # check if given input exists ( file/folder ) - FINAL_LOCAL_INPUT_ARRAY="$(printf "%s\n" "${LOCAL_INPUT_ARRAY}" | while read -r input && { [ -n "${input}" ] || continue; }; do - { [ -r "${input}" ] && printf "%s\n" "${input}"; } || { - { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid Input - ${input} ]" "=" && printf "\n"; } 1>&2 - continue - } - done)" - - # If no input, then check if either -C option was used. - [ -z "${FINAL_LOCAL_INPUT_ARRAY:-${FINAL_ID_INPUT_ARRAY:-${FOLDERNAME}}}" ] && { - # if any account related option was used then don't show short help - [ -z "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-${NEW_ACCOUNT_NAME}}}" ] && _short_help - # exit right away if --list-accounts or --delete-account flag was used - [ -n "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-}}" ] && exit 0 - # don't exit right away when new account is created but also let the rootdir stuff execute - [ -n "${NEW_ACCOUNT_NAME}" ] && CONTINUE_WITH_NO_INPUT="true" - } - - # set CHECK_MODE if empty, below are check mode values - # 1 = check only name, 2 = check name and size, 3 = check name and md5sum - [ -z "${CHECK_MODE}" ] && { - case "${SKIP_DUPLICATES:-${OVERWRITE}}" in - "Overwrite") export CHECK_MODE="1" ;; - "Skip Existing") export CHECK_MODE="2" ;; - esac - } - - return 0 -} - -################################################### -# Setup root directory where all file/folders will be uploaded/updated -# Globals: 5 variables, 6 functions -# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET -# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value, _set_value -# Arguments: None -# Result: read description -# If root id not found then print message and exit -# Update config with root id and root id name if specified -# Reference: -# https://github.com/dylanaraps/pure-bash-bible#use-read-as-an-alternative-to-the-sleep-command -################################################### -_setup_root_dir() { - _check_root_id() { - 
_setup_root_dir_json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")" - if ! rootid_setup_root_dir="$(printf "%s\n" "${_setup_root_dir_json}" | _json_value id 1 1)"; then - if printf "%s\n" "${_setup_root_dir_json}" | grep "File not found" -q; then - "${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." "=" 1>&2 - else - printf "%s\n" "${_setup_root_dir_json}" 1>&2 - fi - return 1 - fi - - ROOT_FOLDER="${rootid_setup_root_dir}" - "${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "${ROOT_FOLDER}" "${CONFIG}" || return 1 - return 0 - } - _check_root_id_name() { - ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name 1 1 || :)" - "${1:-:}" "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" "${ROOT_FOLDER_NAME}" "${CONFIG}" || return 1 - return 0 - } - - _set_value indirect ROOT_FOLDER "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" - _set_value indirect ROOT_FOLDER_NAME "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER_NAME" - - if [ -n "${ROOTDIR:-}" ]; then - ROOT_FOLDER="${ROOTDIR}" && { _check_root_id "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } && unset ROOT_FOLDER_NAME - elif [ -z "${ROOT_FOLDER}" ]; then - { [ -t 1 ] && "${QUIET:-_print_center}" "normal" "Enter root folder ID or URL, press enter for default ( root )" " " && printf -- "-> " && - read -r ROOT_FOLDER && [ -n "${ROOT_FOLDER}" ] && { _check_root_id _update_config || return 1; }; } || { - ROOT_FOLDER="root" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ROOT_FOLDER" "${ROOT_FOLDER}" "${CONFIG}" || return 1 - } && printf "\n\n" - elif [ -z "${ROOT_FOLDER_NAME}" ]; then - _check_root_id_name _update_config || return 1 # update default root folder name if not available - fi - - # fetch root folder name if rootdir different than default - [ -z "${ROOT_FOLDER_NAME}" ] && { _check_root_id_name "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } - - return 0 -} - -################################################### -# Setup Workspace folder -# Check if the given folder exists in google 
drive. -# If not then the folder is created in google drive under the configured root folder. -# Globals: 2 variables, 3 functions -# Variables - FOLDERNAME, ROOT_FOLDER -# Functions - _create_directory, _drive_info, _json_value -# Arguments: None -# Result: Read Description -################################################### -_setup_workspace() { - if [ -z "${FOLDERNAME}" ]; then - WORKSPACE_FOLDER_ID="${ROOT_FOLDER}" - WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}" - else - WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" || - { printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; } - WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" || - { printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; } - fi - return 0 -} - -################################################### -# Process all the values in "${FINAL_LOCAL_INPUT_ARRAY[@]}" & "${FINAL_ID_INPUT_ARRAY[@]}" -# Globals: 22 variables, 17 functions -# Variables - FINAL_LOCAL_INPUT_ARRAY ( array ), ACCESS_TOKEN, VERBOSE, VERBOSE_PROGRESS -# WORKSPACE_FOLDER_ID, UPLOAD_MODE, SKIP_DUPLICATES, OVERWRITE, SHARE, -# UPLOAD_STATUS, COLUMNS, API_URL, API_VERSION, TOKEN_URL, LOG_FILE_ID -# FILE_ID, FILE_LINK, FINAL_ID_INPUT_ARRAY ( array ) -# PARALLEL_UPLOAD, QUIET, NO_OF_PARALLEL_JOBS, TMPFILE, SHARE_ROLE -# Functions - _print_center, _clear_line, _newline, _support_ansi_escapes, _print_center_quiet -# _upload_file, _share_id, _is_terminal, _dirname, -# _create_directory, _json_value, _url_encode, _check_existing_file, _bytes_to_human -# _clone_file, _get_access_token_and_update, _get_rootdir_id -# Arguments: None -# Result: Upload/Clone all the input files/folders, if a folder is empty, print Error message. 
-################################################### -_process_arguments() { - export SOURCE_UTILS - # on successful uploads - _share_and_print_link() { - "${SHARE:-:}" "${1:-}" "${SHARE_ROLE}" "${SHARE_EMAIL}" - [ -z "${HIDE_INFO}" ] && { - _print_center "justify" "DriveLink" "${SHARE:+ (SHARED[$(printf "%.1s" "${SHARE_ROLE}")])}" "-" - _support_ansi_escapes && [ "$((COLUMNS))" -gt 45 ] 2>| /dev/null && _print_center "normal" '^ ^ ^' ' ' - "${QUIET:-_print_center}" "normal" "https://drive.google.com/open?id=${1:-}" " " - } - return 0 - } - - unset Aseen && while read -r input <&4 && - case "${Aseen}" in - *"|:_//_:|${input}|:_//_:|"*) continue ;; - *) Aseen="${Aseen}|:_//_:|${input}|:_//_:|" ;; - esac do - # Check if the argument is a file or a directory. - if [ -f "${input}" ]; then - # export DESCRIPTION_FILE, used for descriptions in _upload_file function - export DESCRIPTION_FILE="${DESCRIPTION}" - - _print_center "justify" "Given Input" ": FILE" "=" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - _upload_file_main noparse "${input}" "${WORKSPACE_FOLDER_ID}" - if [ "${RETURN_STATUS}" = 1 ]; then - _share_and_print_link "${FILE_ID}" - printf "\n" - else - for _ in 1 2; do _clear_line 1; done && continue - fi - elif [ -d "${input}" ]; then - input="$(cd "${input}" && pwd)" || return 1 # to handle dirname when current directory (.) is given as input. 
- unset EMPTY # Used when input folder is empty - - # export DESCRIPTION_FILE only if DESCRIPTION_ALL var is available, used for descriptions in _upload_file function - export DESCRIPTION_FILE="${DESCRIPTION_ALL+:${DESCRIPTION}}" - - _print_center "justify" "Given Input" ": FOLDER" "-" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - FOLDER_NAME="${input##*/}" && "${EXTRA_LOG}" "justify" "Folder: ${FOLDER_NAME}" "=" - - NEXTROOTDIRID="${WORKSPACE_FOLDER_ID}" - - "${EXTRA_LOG}" "justify" "Processing folder.." "-" - - [ -z "${SKIP_SUBDIRS}" ] && "${EXTRA_LOG}" "justify" "Indexing subfolders.." "-" - # Do not create empty folders during a recursive upload. Use of find in this section is important. - DIRNAMES="$(find "${input}" -type d -not -empty)" - NO_OF_FOLDERS="$(($(printf "%s\n" "${DIRNAMES}" | wc -l)))" && NO_OF_SUB_FOLDERS="$((NO_OF_FOLDERS - 1))" - [ -z "${SKIP_SUBDIRS}" ] && _clear_line 1 - [ "${NO_OF_SUB_FOLDERS}" = 0 ] && SKIP_SUBDIRS="true" - - "${EXTRA_LOG}" "justify" "Indexing files.." "-" - FILENAMES="$(_tmp='find "'${input}'" -type f -name "*" '${INCLUDE_FILES}' '${EXCLUDE_FILES}'' && eval "${_tmp}")" - _clear_line 1 - - # Skip the sub folders and find recursively all the files and upload them. - if [ -n "${SKIP_SUBDIRS}" ]; then - if [ -n "${FILENAMES}" ]; then - NO_OF_FILES="$(($(printf "%s\n" "${FILENAMES}" | wc -l)))" - for _ in 1 2; do _clear_line 1; done - - "${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n" - "${EXTRA_LOG}" "justify" "Creating folder.." 
"-" - { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } || - { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; } - _clear_line 1 && DIRIDS="${ID}" - - [ -z "${PARALLEL_UPLOAD:-${VERBOSE:-${VERBOSE_PROGRESS}}}" ] && _newline "\n" - _upload_folder "${PARALLEL_UPLOAD:-normal}" noparse "${FILENAMES}" "${ID}" - [ -n "${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}}" ] && _newline "\n\n" - else - for _ in 1 2; do _clear_line 1; done && EMPTY=1 - fi - else - if [ -n "${FILENAMES}" ]; then - NO_OF_FILES="$(($(printf "%s\n" "${FILENAMES}" | wc -l)))" - for _ in 1 2; do _clear_line 1; done - "${QUIET:-_print_center}" "justify" "${FOLDER_NAME} " "| $((NO_OF_FILES)) File(s) | $((NO_OF_SUB_FOLDERS)) Sub-folders" "=" - - _newline "\n" && "${EXTRA_LOG}" "justify" "Creating Folder(s).." "-" && _newline "\n" - unset status - while read -r dir <&4 && { [ -n "${dir}" ] || continue; }; do - [ -n "${status}" ] && __dir="$(_dirname "${dir}")" && - __temp="$(printf "%s\n" "${DIRIDS}" | grep -F "|:_//_:|${__dir}|:_//_:|")" && - NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}" - - NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2 - ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" || - { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; } - - # Store sub-folder directory IDs and it's path for later use. 
- DIRIDS="$(printf "%b%s|:_//_:|%s|:_//_:|\n" "${DIRIDS:+${DIRIDS}\n}" "${ID}" "${dir}")" - - for _ in 1 2; do _clear_line 1 1>&2; done - "${EXTRA_LOG}" "justify" "Status" ": $((status += 1)) / $((NO_OF_FOLDERS))" "=" 1>&2 - done 4<< EOF -$(printf "%s\n" "${DIRNAMES}") -EOF - export DIRIDS - - _clear_line 1 - - _upload_folder "${PARALLEL_UPLOAD:-normal}" parse "${FILENAMES}" - [ -n "${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}}" ] && _newline "\n\n" - else - for _ in 1 2 3; do _clear_line 1; done && EMPTY=1 - fi - fi - if [ "${EMPTY}" != 1 ]; then - [ -z "${VERBOSE:-${VERBOSE_PROGRESS}}" ] && for _ in 1 2; do _clear_line 1; done - - FOLDER_ID="$(_tmp="$(printf "%s\n" "${DIRIDS}" | while read -r line; do printf "%s\n" "${line}" && break; done)" && printf "%s\n" "${_tmp%%"|:_//_:|"*}")" - - [ "${SUCCESS_STATUS}" -gt 0 ] && _share_and_print_link "${FOLDER_ID}" - - _newline "\n" - [ "${SUCCESS_STATUS}" -gt 0 ] && "${QUIET:-_print_center}" "justify" "Total Files " "Uploaded: ${SUCCESS_STATUS}" "=" - [ "${ERROR_STATUS}" -gt 0 ] && "${QUIET:-_print_center}" "justify" "Total Files " "Failed: ${ERROR_STATUS}" "=" && { - # If running inside a terminal, then check if failed files are more than 25, if not, then print, else save in a log file - if [ -t 1 ]; then - { [ "${ERROR_STATUS}" -le 25 ] && printf "%s\n" "${ERROR_FILES}"; } || { - epoch_time="$(date +'%s')" log_file_name="${0##*/}_${FOLDER_NAME}_${epoch_time}.failed" - # handle in case the vivid random file name was already there - i=0 && until ! 
[ -f "${log_file_name}" ]; do - : $((i += 1)) && log_file_name="${0##*/}_${FOLDER_NAME}_$((epoch_time + i)).failed" - done - printf "%s\n%s\n%s\n\n%s\n%s\n" \ - "Folder name: ${FOLDER_NAME} | Folder ID: ${FOLDER_ID}" \ - "Run this command to retry the failed uploads:" \ - " ${0##*/} --skip-duplicates \"${input}\" --root-dir \"${NEXTROOTDIRID}\" ${SKIP_SUBDIRS:+-s} ${PARALLEL_UPLOAD:+--parallel} ${PARALLEL_UPLOAD:+${NO_OF_PARALLEL_JOBS}}" \ - "Failed files:" \ - "${ERROR_FILES}" >> "${log_file_name}" - printf "%s\n" "To see the failed files, open \"${log_file_name}\"" - printf "%s\n" "To retry the failed uploads only, use -d / --skip-duplicates flag. See log file for more help." - } - # if not running inside a terminal, print it all - else - printf "%s\n" "${ERROR_FILES}" - fi - } - printf "\n" - else - for _ in 1 2 3; do _clear_line 1; done - "${QUIET:-_print_center}" 'justify' "Empty Folder" ": ${FOLDER_NAME}" "=" 1>&2 - printf "\n" - fi - fi - done 4<< EOF -$(printf "%s\n" "${FINAL_LOCAL_INPUT_ARRAY}") -EOF - - unset Aseen && while read -r gdrive_id <&4 && { [ -n "${gdrive_id}" ] || continue; } && - case "${Aseen}" in - *"|:_//_:|${gdrive_id}|:_//_:|"*) continue ;; - *) Aseen="${Aseen}|:_//_:|${gdrive_id}|:_//_:|" ;; - esac do - _print_center "justify" "Given Input" ": ID" "=" - "${EXTRA_LOG}" "justify" "Checking if id exists.." "-" - [ "${CHECK_MODE}" = "md5Checksum" ] && param="md5Checksum" - json="$(_drive_info "${gdrive_id}" "name,mimeType,size${param:+,${param}}")" || : - if ! 
printf "%s\n" "${json}" | _json_value code 1 1 2>| /dev/null 1>&2; then - type="$(printf "%s\n" "${json}" | _json_value mimeType 1 1 || :)" - name="$(printf "%s\n" "${json}" | _json_value name 1 1 || :)" - size="$(printf "%s\n" "${json}" | _json_value size 1 1 || :)" - [ "${CHECK_MODE}" = "md5Checksum" ] && md5="$(printf "%s\n" "${json}" | _json_value md5Checksum 1 1 || :)" - for _ in 1 2; do _clear_line 1; done - case "${type}" in - *folder*) - # export DESCRIPTION_FILE only if DESCRIPTION_ALL var is available, used for descriptions in _clone_file function - export DESCRIPTION_FILE="${DESCRIPTION_ALL+:${DESCRIPTION}}" - - "${QUIET:-_print_center}" "justify" "Folder not supported." "=" 1>&2 && _newline "\n" 1>&2 && continue - ## TODO: Add support to clone folders - ;; - *) - # export DESCRIPTION_FILE, used for descriptions in _clone_file function - export DESCRIPTION_FILE="${DESCRIPTION}" - - _print_center "justify" "Given Input" ": File ID" "=" - _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" - _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" "${md5}" || - { for _ in 1 2; do _clear_line 1; done && continue; } - ;; - esac - _share_and_print_link "${FILE_ID}" - printf "\n" - else - _clear_line 1 - "${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-gdrive_id})" " invalid." "=" 1>&2 - printf "\n" - fi - done 4<< EOF -$(printf "%s\n" "${FINAL_ID_INPUT_ARRAY}") -EOF - return 0 -} - -main() { - [ $# = 0 ] && _short_help - - if [ -z "${SELF_SOURCE}" ]; then - export UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" - export SOURCE_UTILS='. '${UTILS_FOLDER}/auth-utils.sh' && . '${UTILS_FOLDER}/common-utils.sh' && . '${UTILS_FOLDER}/drive-utils.sh' && . '${UTILS_FOLDER}/upload-utils.sh'' - else - SCRIPT_PATH="$(cd "$(_dirname "${0}")" && pwd)/${0##*\/}" && export SCRIPT_PATH - export SOURCE_UTILS='SOURCED_GUPLOAD=true . 
'${SCRIPT_PATH}'' - fi - eval "${SOURCE_UTILS}" || { printf "Error: Unable to source util files.\n" && exit 1; } - - set -o errexit -o noclobber - - _setup_arguments "${@}" || exit 1 - "${SKIP_INTERNET_CHECK:-_check_internet}" || exit 1 - - { { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || - TMPFILE="$(pwd)/.$(_t="$(date +'%s')" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 - export TMPFILE - - _cleanup() { - # unhide the cursor if hidden - [ -n "${SUPPORT_ANSI_ESCAPES}" ] && printf "\033[?25h\033[?7h" - { - # update the config with latest ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY only if changed - [ -f "${TMPFILE}_ACCESS_TOKEN" ] && { - . "${TMPFILE}_ACCESS_TOKEN" - [ "${INITIAL_ACCESS_TOKEN}" = "${ACCESS_TOKEN}" ] || { - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "${ACCESS_TOKEN}" "${CONFIG}" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}" - } - } || : 1>| /dev/null - - # grab all chidren processes of access token service - # https://askubuntu.com/a/512872 - [ -n "${ACCESS_TOKEN_SERVICE_PID}" ] && { - token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)" - # first kill parent id, then children processes - kill "${ACCESS_TOKEN_SERVICE_PID}" - } || : 1>| /dev/null - - # grab all script children pids - script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)" - - # kill all grabbed children processes - # shellcheck disable=SC2086 - kill ${token_service_pids} ${script_children_pids} 1>| /dev/null - - rm -f "${TMPFILE:?}"* - - export abnormal_exit && if [ -n "${abnormal_exit}" ]; then - printf "\n\n%s\n" "Script exited manually." 
- kill -9 -$$ & - else - { _cleanup_config "${CONFIG}" && [ "${GUPLOAD_INSTALLED_WITH}" = script ] && _auto_update; } 1>| /dev/null & - fi - } 2>| /dev/null || : - return 0 - } - - trap 'abnormal_exit="1" ; exit' INT TERM - trap '_cleanup' EXIT - trap '' TSTP # ignore ctrl + z - - export MAIN_PID="$$" - - START="$(date +'%s')" - - "${EXTRA_LOG}" "justify" "Checking credentials.." "-" - { _check_credentials && _clear_line 1; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Credentials checking failed ]" "=" && exit 1; } - "${QUIET:-_print_center}" "normal" " Account: ${ACCOUNT_NAME} " "=" - - "${EXTRA_LOG}" "justify" "Checking root dir.." "-" - { _setup_root_dir && _clear_line 1; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Rootdir setup failed ]" "=" && exit 1; } - _print_center "justify" "Root dir properly configured." "=" - - # only execute next blocks if there was some input - [ -n "${CONTINUE_WITH_NO_INPUT}" ] && exit 0 - - "${EXTRA_LOG}" "justify" "Checking Workspace Folder.." 
"-" - { _setup_workspace && for _ in 1 2; do _clear_line 1; done; } || - { "${QUIET:-_print_center}" "normal" "[ Error: Workspace setup failed ]" "=" && exit 1; } - _print_center "justify" "Workspace Folder: ${WORKSPACE_FOLDER_NAME}" "=" - "${HIDE_INFO:-_print_center}" "normal" " ${WORKSPACE_FOLDER_ID} " "-" && _newline "\n" - - # hide the cursor if ansi escapes are supported - [ -n "${SUPPORT_ANSI_ESCAPES}" ] && printf "\033[?25l" - - _process_arguments - - END="$(date +'%s')" - DIFF="$((END - START))" - "${QUIET:-_print_center}" 'normal' " Time Elapsed: $((DIFF / 60)) minute(s) and $((DIFF % 60)) seconds " "=" -} - -{ [ -z "${SOURCED_GUPLOAD}" ] && main "${@}"; } || : diff --git a/src/.editorconfig b/src/.editorconfig new file mode 100644 index 0000000..c3d0034 --- /dev/null +++ b/src/.editorconfig @@ -0,0 +1,29 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +# for shfmt +[*.sh] +indent_style = space +indent_size = 4 +shell_variant = posix +switch_case_indent = true +space_redirects = true + +[*.bash] +indent_style = space +indent_size = 4 +shell_variant = bash +switch_case_indent = true +space_redirects = true + +[*.md] +trim_trailing_whitespace = false diff --git a/bash/.gitignore b/src/.gitignore similarity index 100% rename from bash/.gitignore rename to src/.gitignore diff --git a/bash/.editorconfig b/src/bash/.editorconfig similarity index 100% rename from bash/.editorconfig rename to src/bash/.editorconfig diff --git a/src/bash/common-utils.bash b/src/bash/common-utils.bash new file mode 100644 index 0000000..38c1ca4 --- /dev/null +++ b/src/bash/common-utils.bash @@ -0,0 +1,121 @@ +#!/usr/bin/env bash +# Functions that will used in core script + +################################################### +# Check if something contains some +# 
Arguments: +# ${1} = pattern to match, can be regex +# ${2} = string where it should match the pattern +# Result: return 0 or 1 +################################################### +_assert_regex() { + declare pattern="${1:?Error: Missing pattern}" string="${2:?Missing string}" + if [[ ${string} =~ ${pattern} ]]; then + return 0 + else + return 1 + fi +} + +################################################### +# Alternative to cat command, just the basic function +# Arguments: 1 or many, file names +################################################### +cat() { + for file in "${@}"; do + printf "%s\n" "$(< "${file}")" + done +} + +################################################### +# Alternative to wc -l command +# Arguments: 1 or pipe +# ${1} = file, _count < file +# variable, _count <<< variable +# pipe = echo something | _count +# Result: Read description +# Reference: +# https://github.com/dylanaraps/pure-bash-bible#get-the-number-of-lines-in-a-file +################################################### +_count() { + mapfile -tn 0 lines + printf '%s\n' "${#lines[@]}" +} + +################################################### +# Print epoch seconds +################################################### +_epoch() { + printf "%(%s)T\\n" "-1" +} + +################################################### +# fetch column size and check if greater than the num ( see in function) +# set trap on sigwinch to update COLUMNS variable +# return 1 or 0 +################################################### +_required_column_size() { + shopt -s checkwinsize && (: && :) + if [[ ${COLUMNS} -gt 45 ]]; then + trap 'shopt -s checkwinsize; (:;:)' SIGWINCH + return 0 + else + return 1 + fi +} + +################################################### +# Evaluates value1=value2 +# Arguments: 3 +# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) # ${2} = var name +# ${3} = var value +# Result: export value1=value2 +################################################### +_set_value() { + case 
"${1:?}" in + d | direct) export "${2:?}=${3}" ;; + i | indirect) export "${2:?}=${!3}" ;; + *) return 1 ;; + esac +} + +################################################### +# remove the given character from the given string +# 1st arg - character +# 2nd arg - string +# 3rd arg - var where to save the output +# print trimmed string if 3rd arg empty else set +################################################### +_trim() { + declare char="${1}" str="${2}" var="${3}" + + if [[ -n ${var} ]]; then + _set_value d "${var}" "${str//${char}/}" + else + printf "%s" "${str//${char}/}" + fi +} + +################################################### +# Encode the given string to parse properly in network requests +# Arguments: 1 +# ${1} = string +# Result: print encoded string +# Reference: +# https://github.com/dylanaraps/pure-bash-bible#percent-encode-a-string +################################################### +_url_encode() { + declare LC_ALL=C + for ((i = 0; i < ${#1}; i++)); do + : "${1:i:1}" + case "${_}" in + [a-zA-Z0-9.~_-]) + printf '%s' "${_}" + ;; + *) + printf '%%%02X' "'${_}" + ;; + esac + done 2>| /dev/null + printf '\n' +} diff --git a/sh/.editorconfig b/src/common/.editorconfig similarity index 100% rename from sh/.editorconfig rename to src/common/.editorconfig diff --git a/sh/.gitignore b/src/common/.gitignore similarity index 100% rename from sh/.gitignore rename to src/common/.gitignore diff --git a/sh/auth-utils.sh b/src/common/auth-utils.sh old mode 100755 new mode 100644 similarity index 84% rename from sh/auth-utils.sh rename to src/common/auth-utils.sh index fc964f6..7338fe1 --- a/sh/auth-utils.sh +++ b/src/common/auth-utils.sh @@ -4,22 +4,19 @@ ################################################### # Check if account name is valid by a regex expression -# Globals: None # Arguments: 1 # ${1} = Account name # Result: read description and return 1 or 0 ################################################### _account_name_valid() { 
name_account_name_valid="${1:?}" account_name_regex_account_name_valid='^([A-Za-z0-9_])+$' - printf "%s\n" "${name_account_name_valid}" | grep -qE "${account_name_regex_account_name_valid}" || return 1 + _assert_regex "${account_name_regex_account_name_valid}" "${name_account_name_valid}" || return 1 return 0 } ################################################### # Check if account exists # First check if the given account is in correct format -# Globals: 2 functions -# _set_value, _account_name_valid # Arguments: 1 # ${1} = Account name # Result: read description and return 1 or 0 @@ -36,14 +33,11 @@ _account_exists() { ################################################### # Show all accounts configured in config file -# Globals: 2 variables, 4 functions -# Variable - CONFIG, QUIET -# Functions - _account_exists, _set_value, _print_center, _reload_config -# Arguments: None # Result: SHOW all accounts, export COUNT and ACC_${count}_ACC dynamic variables # or print "No accounts configured yet." ################################################### _all_accounts() { + export CONFIG QUIET { _reload_config && _handle_old_config; } || return 1 COUNT=0 while read -r account <&4 && [ -n "${account}" ]; do @@ -51,7 +45,7 @@ _all_accounts() { { [ "${COUNT}" = 0 ] && "${QUIET:-_print_center}" "normal" " All available accounts. " "=" || :; } && printf "%b" "$((COUNT += 1)). ${account} \n" && _set_value direct "ACC_${COUNT}_ACC" "${account}" done 4<< EOF -$(grep -oE '^ACCOUNT_.*_CLIENT_ID' "${CONFIG}" | sed -e "s/ACCOUNT_//g" -e "s/_CLIENT_ID//g") +$(grep -oE '^ACCOUNT_.*_CLIENT_ID' -- "${CONFIG}" | sed -e "s/ACCOUNT_//g" -e "s/_CLIENT_ID//g") EOF { [ "${COUNT}" -le 0 ] && "${QUIET:-_print_center}" "normal" " No accounts configured yet. 
" "=" 1>&2; } || printf '\n' return 0 @@ -61,14 +55,12 @@ EOF # Setup a new account name # If given account name is configured already, then ask for name # after name has been properly setup, export ACCOUNT_NAME var -# Globals: 1 variable, 5 functions -# Variable - QUIET -# Functions - _print_center, _account_exists, _clear_line, _account_name_valid, _reload_config # Arguments: 1 # ${1} = Account name ( optional ) # Result: read description and export ACCOUNT_NAME NEW_ACCOUNT_NAME ################################################### _set_new_account_name() { + export QUIET NEW_ACCOUNT_NAME _reload_config || return 1 new_account_name_set_new_account_name="${1:-}" && unset name_valid_set_new_account_name [ -z "${new_account_name_set_new_account_name}" ] && { @@ -83,7 +75,7 @@ _set_new_account_name() { "${QUIET:-_print_center}" "normal" " Warning: Given account ( ${new_account_name_set_new_account_name} ) already exists, input different name. " "-" 1>&2 unset new_account_name_set_new_account_name && continue else - export new_account_name_set_new_account_name="${new_account_name_set_new_account_name}" ACCOUNT_NAME="${new_account_name_set_new_account_name}" && + export new_account_name_set_new_account_name="${new_account_name_set_new_account_name}" NEW_ACCOUNT_NAME="${new_account_name_set_new_account_name}" && name_valid_set_new_account_name="true" && continue fi else @@ -105,21 +97,18 @@ _set_new_account_name() { ################################################### # Delete a account from config file -# Globals: 2 variables, 3 functions -# Variables - CONFIG, QUIET -# Functions - _account_exists, _print_center, _reload_config -# Arguments: None # Result: check if account exists and delete from config, else print error message ################################################### _delete_account() { + export CONFIG QUIET { _reload_config && _handle_old_config; } || return 1 account_delete_account="${1:?Error: give account name}" && unset regex_delete_account 
config_without_values_delete_account if _account_exists "${account_delete_account}"; then regex_delete_account="^ACCOUNT_${account_delete_account}_(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)|DEFAULT_ACCOUNT=\"${account_delete_account}\"" - config_without_values_delete_account="$(grep -vE "${regex_delete_account}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit + config_without_values_delete_account="$(grep -vE "${regex_delete_account}" -- "${CONFIG}")" + chmod u+w -- "${CONFIG}" || return 1 # change perms to edit printf "%s\n" "${config_without_values_delete_account}" >| "${CONFIG}" || return 1 - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms + chmod "a-w-r-x,u+r" -- "${CONFIG}" || return 1 # restore perms "${QUIET:-_print_center}" "normal" " Successfully deleted account ( ${account_delete_account} ) from config. " "-" else "${QUIET:-_print_center}" "normal" " Error: Cannot delete account ( ${account_delete_account} ) from config. 
No such account exists " "-" 1>&2 @@ -131,14 +120,10 @@ _delete_account() { # handle legacy config # this will be triggered only if old config values are present, convert to new format # new account will be created with "default" name, if default already taken, then add a number as suffix -# Globals: 7 variables, 2 functions -# Variables - CLIENT_ID CLIENT_SECRET, REFRESH_TOKEN, ROOT_FOLDER, ROOT_FOLDER_NAME CONFIG, ACCOUNT_NAME -# Functions - _account_exists, _reload_config -# Arguments: None ################################################### _handle_old_config() { - export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN # to handle a shellcheck warning - # only try to convert the if all three values are present + # to handle a shellcheck warning + export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ROOT_FOLDER ROOT_FOLDER_NAME # only try to convert the if all three values are present [ -n "${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}}" ] && { account_name_handle_old_config="default" regex_check_handle_old_config config_without_values_handle_old_config count_handle_old_config # first try to name the new account as default, otherwise try to add numbers as suffix @@ -146,8 +131,8 @@ _handle_old_config() { account_name_handle_old_config="${account_name_handle_old_config}$((count_handle_old_config += 1))" done regex_check_handle_old_config="^(CLIENT_ID=|CLIENT_SECRET=|REFRESH_TOKEN=|ROOT_FOLDER=|ROOT_FOLDER_NAME=|ACCESS_TOKEN=|ACCESS_TOKEN_EXPIRY=)" - config_without_values_handle_old_config="$(grep -vE "${regex_check_handle_old_config}" "${CONFIG}")" - chmod u+w "${CONFIG}" || return 1 # change perms to edit + config_without_values_handle_old_config="$(grep -vE "${regex_check_handle_old_config}" -- "${CONFIG}")" + chmod u+w -- "${CONFIG}" || return 1 # change perms to edit printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ "ACCOUNT_${account_name_handle_old_config}_CLIENT_ID=\"${CLIENT_ID}\"" \ "ACCOUNT_${account_name_handle_old_config}_CLIENT_SECRET=\"${CLIENT_SECRET}\"" \ @@ -156,7 +141,7 @@ 
_handle_old_config() { "ACCOUNT_${account_name_handle_old_config}_ROOT_FOLDER_NAME=\"${ROOT_FOLDER_NAME}\"" \ "${config_without_values_handle_old_config}" >| "${CONFIG}" || return 1 - chmod "a-w-r-x,u+r" "${CONFIG}" || return 1 # restore perms + chmod "a-w-r-x,u+r" -- "${CONFIG}" || return 1 # restore perms _reload_config || return 1 # reload config file } @@ -166,16 +151,12 @@ _handle_old_config() { ################################################### # handle old config values, new account creation, custom account name, updating default config and account # start token service if applicable -# Globals: 12 variables, 7 functions -# Variables - DEFAULT_CONFIG, NEW_ACCOUNT_NAME, CUSTOM_ACCOUNT_NAME, DELETE_ACCOUNT_NAME, LIST_ACCOUNTS, QUIET -# UPDATE_DEFAULT_ACCOUNT, UPDATE_DEFAULT_CONFIG, CONFIG_INFO, CONTINUE_WITH_NO_INPUT -# Functions - _reload_config, _handle_old_config, _set_new_account_name, _account_exists, _all_accounts -# _check_account_credentials, _token_bg_service, _print_center, _update_config, _set_value -# Arguments: None # Result: read description and start access token check in bg if required ################################################### _check_credentials() { + export CONFIG CONFIG_INFO DEFAULT_ACCOUNT NEW_ACCOUNT_NAME CUSTOM_ACCOUNT_NAME QUIET COUNT { _reload_config && _handle_old_config; } || return 1 + # set account name to default account name ACCOUNT_NAME="${DEFAULT_ACCOUNT}" # if old values exist in config @@ -246,8 +227,6 @@ _check_credentials() { ################################################### # check credentials for a given account name -# Globals: 3 functions -# Functions - _check_client, _check_refresh_token, _check_access_token # Arguments: 2 # ${1} = Account name # Result: read description, return 1 or 0 @@ -265,29 +244,30 @@ _check_account_credentials() { ################################################### # Check client id or secret and ask if required -# Globals: 4 variables, 3 functions -# Variables - CONFIG, QUIET, 
CLIENT_ID_${ACCOUNT_NAME}, CLIENT_SECRET_${ACCOUNT_NAME} -# Functions - _print_center, _update_config, _set_value # Arguments: 2 # ${1} = ID or SECRET # ${2} = Account name ( optional - if not given, then just CLIENT_[ID|SECRET] var is used ) # Result: read description and export ACCOUNT_name_CLIENT_[ID|SECRET] CLIENT_[ID|SECRET] ################################################### _check_client() { + export CONFIG QUIET type_check_client="CLIENT_${1:?Error: ID or SECRET}" account_name_check_client="${2:-}" - type_value_check_client="" type_regex_check_client="" && - unset type_name_check_client valid_check_client client_check_client message_check_client - export client_id_regex='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' client_secret_regex='[0-9A-Za-z_-]+' - type_name_check_client="${account_name_check_client:+ACCOUNT_${account_name_check_client}_}${type_check_client}" + unset type_value_check_client type_name_check_client valid_check_client client_check_client message_check_client regex_check_client + + # set regex for validation + if [ "${type_check_client}" = "CLIENT_ID" ]; then + regex_check_client='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com' + else + regex_check_client='[0-9A-Za-z_-]+' + fi # set the type_value to the actual value of ACCOUNT_${account_name}_[ID|SECRET] + type_name_check_client="${account_name_check_client:+ACCOUNT_${account_name_check_client}_}${type_check_client}" _set_value indirect type_value_check_client "${type_name_check_client}" - # set the type_regex to the actual value of client_id_regex or client_secret_regex - _set_value indirect type_regex_check_client "${type_check_client}_regex" until [ -n "${type_value_check_client}" ] && [ -n "${valid_check_client}" ]; do [ -n "${type_value_check_client}" ] && { - if printf "%s\n" "${type_value_check_client}" | grep -qE "${type_regex_check_client}"; then + if _assert_regex "${regex_check_client}" "${type_value_check_client}"; then [ -n "${client_check_client}" ] && { 
_update_config "${type_name_check_client}" "${type_value_check_client}" "${CONFIG}" || return 1; } valid_check_client="true" && continue else @@ -312,24 +292,28 @@ _check_client() { ################################################### # Check refresh token and ask if required -# Globals: 8 variables, 4 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REDIRECT_URI, TOKEN_URL, CONFIG, QUIET -# Functions - _set_value, _print_center, _update_config, _check_access_token # Arguments: 1 # ${1} = Account name ( optional - if not given, then just REFRESH_TOKEN var is used ) # Result: read description & export REFRESH_TOKEN ACCOUNT_${account_name}_REFRESH_TOKEN ################################################### _check_refresh_token() { + export CLIENT_ID CLIENT_SECRET QUIET CONFIG CURL_PROGRESS SCOPE REDIRECT_URI TOKEN_URL # bail out before doing anything if client id and secret is not present, unlikely to happen but just in case [ -z "${CLIENT_ID:+${CLIENT_SECRET}}" ] && return 1 account_name_check_refresh_token="${1:-}" refresh_token_regex='[0-9]//[0-9A-Za-z_-]+' authorization_code_regex='[0-9]/[0-9A-Za-z_-]+' - refresh_token_name_check_refresh_token="${account_name_check_refresh_token:+ACCOUNT_${account_name_check_refresh_token}_}REFRESH_TOKEN" - _set_value indirect refresh_token_value_check_refresh_token "${refresh_token_name_check_refresh_token}" + _set_value direct refresh_token_name_check_refresh_token "${account_name_check_refresh_token:+ACCOUNT_${account_name_check_refresh_token}_}REFRESH_TOKEN" + _set_value indirect refresh_token_value_check_refresh_token "${refresh_token_name_check_refresh_token:-}" + + # check if need to refetch refresh token whether one present or not + # checked when --oauth-refetch-refresh-token flag is used + [ "${REFETCH_REFRESH_TOKEN:-false}" = "true" ] && { + unset refresh_token_value_check_refresh_token + } [ -n "${refresh_token_value_check_refresh_token}" ] && { - ! 
printf "%s\n" "${refresh_token_value_check_refresh_token}" | grep -qE "${refresh_token_regex}" && + ! _assert_regex "${refresh_token_regex}" "${refresh_token_value_check_refresh_token}" && "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. " "-" && unset refresh_token_value_check_refresh_token } @@ -339,7 +323,7 @@ _check_refresh_token() { read -r refresh_token_value_check_refresh_token if [ -n "${refresh_token_value_check_refresh_token}" ]; then "${QUIET:-_print_center}" "normal" " Checking refresh token.. " "-" - if printf "%s\n" "${refresh_token_value_check_refresh_token}" | grep -qE "${refresh_token_regex}"; then + if _assert_regex "${refresh_token_regex}" "${refresh_token_value_check_refresh_token}"; then _set_value direct REFRESH_TOKEN "${refresh_token_value_check_refresh_token}" { _check_access_token "${account_name_check_refresh_token}" skip_check && _update_config "${refresh_token_name_check_refresh_token}" "${refresh_token_value_check_refresh_token}" "${CONFIG}" && @@ -359,7 +343,7 @@ _check_refresh_token() { unset AUTHORIZATION_CODE authorization_code AUTHORIZATION_CODE_VALID response until [ -n "${AUTHORIZATION_CODE}" ] && [ -n "${AUTHORIZATION_CODE_VALID}" ]; do [ -n "${AUTHORIZATION_CODE}" ] && { - if printf "%s\n" "${AUTHORIZATION_CODE}" | grep -qE "${authorization_code_regex}"; then + if _assert_regex "${authorization_code_regex}" "${AUTHORIZATION_CODE}"; then AUTHORIZATION_CODE_VALID="true" && continue else "${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. 
" "-" && unset AUTHORIZATION_CODE authorization_code @@ -395,9 +379,6 @@ _check_refresh_token() { ################################################### # Check access token and create/update if required # Also update in config -# Globals: 9 variables, 3 functions -# Variables - CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, TOKEN_URL, CONFIG, API_URL, API_VERSION, QUIET -# Functions - _print_center, _update_config, _set_value # Arguments: 2 # ${1} = Account name ( if not given, then just ACCESS_TOKEN var is used ) # ${2} = if skip_check, then force create access token, else check with regex and expiry @@ -405,6 +386,7 @@ _check_refresh_token() { # Result: read description & export ACCESS_TOKEN ACCESS_TOKEN_EXPIRY ################################################### _check_access_token() { + export CLIENT_ID CLIENT_SECRET REFRESH_TOKEN CONFIG QUIET # bail out before doing anything if client id|secret or refresh token is not present, unlikely to happen but just in case [ -z "${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}}" ] && return 1 @@ -417,17 +399,18 @@ _check_access_token() { _set_value indirect token_value_check_access_token "${token_name_check_access_token}" _set_value indirect token_expiry_value_check_access_token "${token_expiry_name_check_access_token}" - [ "${no_check_check_access_token}" = skip_check ] || [ -z "${token_value_check_access_token}" ] || [ "${token_expiry_value_check_access_token:-0}" -lt "$(date +"%s")" ] || ! printf "%s\n" "${token_value_check_access_token}" | grep -qE "${access_token_regex}" && { + [ "${no_check_check_access_token}" = skip_check ] || [ -z "${token_value_check_access_token}" ] || [ "${token_expiry_value_check_access_token:-0}" -lt "$(_epoch)" ] || ! 
_assert_regex "${access_token_regex}" "${token_value_check_access_token}" && { response_check_access_token="${response_json_check_access_token:-$(curl --compressed -s -X POST --data \ "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || : if token_value_check_access_token="$(printf "%s\n" "${response_check_access_token}" | _json_value access_token 1 1)"; then - token_expiry_value_check_access_token="$(($(date +"%s") + $(printf "%s\n" "${response_check_access_token}" | _json_value expires_in 1 1) - 1))" + token_expiry_value_check_access_token="$(($(_epoch) + $(printf "%s\n" "${response_check_access_token}" | _json_value expires_in 1 1) - 1))" _update_config "${token_name_check_access_token}" "${token_value_check_access_token}" "${CONFIG}" || return 1 _update_config "${token_expiry_name_check_access_token}" "${token_expiry_value_check_access_token}" "${CONFIG}" || return 1 else "${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2 printf "%s\n" "${response_check_access_token}" 1>&2 + printf "%s\n" "If refresh token has expired, then use --oauth-refetch-refresh-token to refetch refresh token, if the error is not clear make a issue on github repository." return 1 fi } @@ -446,7 +429,8 @@ _check_access_token() { # uses global variable CONFIG ################################################### _reload_config() { - { [ -r "${CONFIG}" ] && . 
"${CONFIG}"; } || { printf "" >> "${CONFIG}" || return 1; } + export CONFIG + { [ -r "${CONFIG}" ] && _parse_config "${CONFIG}"; } || { printf "" >> "${CONFIG}" || return 1; } return 0 } @@ -454,19 +438,16 @@ _reload_config() { # launch a background service to check access token and update it # checks ACCESS_TOKEN_EXPIRY, try to update before 5 mins of expiry, a fresh token gets 60 mins # process will be killed when script exits or "${MAIN_PID}" is killed -# Globals: 4 variables, 1 function -# Variables - ACCESS_TOKEN, ACCESS_TOKEN_EXPIRY, MAIN_PID, TMPFILE -# Functions - _check_access_token -# Arguments: None # Result: read description & export ACCESS_TOKEN_SERVICE_PID ################################################### _token_bg_service() { + export MAIN_PID ACCESS_TOKEN ACCESS_TOKEN_EXPIRY TMPFILE [ -z "${MAIN_PID}" ] && return 0 # don't start if MAIN_PID is empty printf "%b\n" "ACCESS_TOKEN=\"${ACCESS_TOKEN}\"\nACCESS_TOKEN_EXPIRY=\"${ACCESS_TOKEN_EXPIRY}\"" >| "${TMPFILE}_ACCESS_TOKEN" { until ! kill -0 "${MAIN_PID}" 2>| /dev/null 1>&2; do . 
"${TMPFILE}_ACCESS_TOKEN" - CURRENT_TIME="$(date +"%s")" + CURRENT_TIME="$(_epoch)" REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))" if [ "${REMAINING_TOKEN_TIME}" -le 300 ]; then # timeout after 30 seconds, it shouldn't take too long anyway, and update tmp config diff --git a/sh/common-utils.sh b/src/common/common-utils.sh old mode 100755 new mode 100644 similarity index 81% rename from sh/common-utils.sh rename to src/common/common-utils.sh index 9795b2f..780418f --- a/sh/common-utils.sh +++ b/src/common/common-utils.sh @@ -1,15 +1,14 @@ #!/usr/bin/env sh -# Functions that will used in core script -# posix functions +# Common functions which will be used in both bash and posix scripts +# shellcheck source=/dev/null ################################################### # Convert bytes to human readable form -# Globals: None # Required Arguments: 1 # ${1} = Positive integer ( bytes ) # Result: Print human readable form. # Reference: -# https://unix.stackexchange.com/a/538015 +# https://unix.stackexchange.com/a/259254 ################################################### _bytes_to_human() { b_bytes_to_human="$(printf "%.0f\n" "${1:-0}")" s_bytes_to_human=0 @@ -27,34 +26,33 @@ _bytes_to_human() { ################################################### # Check if debug is enabled and enable command trace -# Globals: 2 variables, 1 function -# Varibles - DEBUG, QUIET -# Function - _is_terminal -# Arguments: None # Result: If DEBUG # Present - Enable command trace and change print functions to avoid spamming. # Absent - Disable command trace # Check QUIET, then check terminal size and enable print functions accordingly. 
################################################### _check_debug() { + export DEBUG QUIET if [ -n "${DEBUG}" ]; then set -x && PS4='-> ' _print_center() { { [ $# = 3 ] && printf "%s\n" "${2}"; } || { printf "%s%s\n" "${2}" "${3}"; }; } - _clear_line() { :; } && _newline() { :; } + _clear_line() { :; } && _move_cursor() { :; } && _newline() { :; } else if [ -z "${QUIET}" ]; then # check if running in terminal and support ansi escape sequences if _support_ansi_escapes; then - ! COLUMNS="$(_get_columns_size)" || [ "${COLUMNS:-0}" -lt 45 ] 2>| /dev/null && + if ! _required_column_size; then _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - export CURL_PROGRESS="-#" EXTRA_LOG="_print_center" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" + + fi + export EXTRA_LOG="_print_center" CURL_PROGRESS="-#" CURL_PROGRESS_EXTRA="-#" SUPPORT_ANSI_ESCAPES="true" else _print_center() { { [ $# = 3 ] && printf "%s\n" "[ ${2} ]"; } || { printf "%s\n" "[ ${2}${3} ]"; }; } - _clear_line() { :; } + _clear_line() { :; } && _move_cursor() { :; } fi _newline() { printf "%b" "${1}"; } else - _print_center() { :; } && _clear_line() { :; } && _newline() { :; } + _print_center() { :; } && _clear_line() { :; } && _move_cursor() { :; } && _newline() { :; } fi set +x fi @@ -63,15 +61,12 @@ _check_debug() { ################################################### # Check internet connection. # Probably the fastest way, takes about 1 - 2 KB of data, don't check for more than 10 secs. -# Globals: 3 functions -# _print_center, _clear_line, _timeout -# Arguments: None # Result: On # Success - Nothing # Error - print message and exit 1 ################################################### _check_internet() { - "${EXTRA_LOG}" "justify" "Checking Internet Connection.." "-" + "${EXTRA_LOG:-}" "justify" "Checking Internet Connection.." "-" if ! 
_timeout 10 curl -Is google.com --compressed; then _clear_line 1 "${QUIET:-_print_center}" "justify" "Error: Internet connection" " not available." "=" @@ -82,7 +77,6 @@ _check_internet() { ################################################### # Move cursor to nth no. of line and clear it to the begining. -# Globals: None # Arguments: 1 # ${1} = Positive integer ( line number ) # Result: Read description @@ -93,7 +87,6 @@ _clear_line() { ################################################### # Alternative to dirname command -# Globals: None # Arguments: 1 # ${1} = path of file or folder # Result: read description @@ -102,7 +95,7 @@ _clear_line() { ################################################### _dirname() { dir_dirname="${1:-.}" - dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" && [ "${dir_dirname##*/*}" ] && dir_dirname=. + dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" && [ -n "${dir_dirname##*/*}" ] && dir_dirname=. dir_dirname="${dir_dirname%/*}" && dir_dirname="${dir_dirname%%"${dir_dirname##*[!/]}"}" printf '%s\n' "${dir_dirname:-/}" } @@ -110,7 +103,6 @@ _dirname() { ################################################### # Convert given time in seconds to readable form # 110 to 1 minute(s) and 50 seconds -# Globals: None # Arguments: 1 # ${1} = Positive Integer ( time in seconds ) # Result: read description @@ -127,22 +119,9 @@ _display_time() { printf '%d seconds\n' "${sec_display_time}" } -################################################### -# print column size -# use zsh or stty or tput -################################################### -_get_columns_size() { - { command -v bash 1>| /dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } || - { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } || - { command -v stty 1>| /dev/null && _tmp="$(stty size)" && printf "%s\n" "${_tmp##* }"; } || - { command -v tput 1>| /dev/null && tput cols; } || - return 1 -} - 
################################################### # Fetch latest commit sha of release or branch # Do not use github rest api because rate limit error occurs -# Globals: None # Arguments: 3 # ${1} = "branch" or "release" # ${2} = branch name or release name @@ -150,6 +129,7 @@ _get_columns_size() { # Result: print fetched sha ################################################### _get_latest_sha() { + export TYPE TYPE_VALUE REPO unset latest_sha_get_latest_sha raw_get_latest_sha case "${1:-${TYPE}}" in branch) @@ -164,13 +144,13 @@ _get_latest_sha() { _tmp="$(printf "%s\n" "${raw_get_latest_sha}" | grep "=\"/""${3:-${REPO}}""/commit" -m1 || :)" && _tmp="${_tmp##*commit\/}" && printf "%s\n" "${_tmp%%\"*}" )" ;; + *) : ;; esac printf "%b" "${latest_sha_get_latest_sha:+${latest_sha_get_latest_sha}\n}" } ################################################### # Encode the given string to parse properly as json -# Globals: None # Arguments: 2 # ${1} = json or something else # ${2} = input @@ -202,7 +182,6 @@ _json_escape() { ################################################### # Method to extract specified field data from json -# Globals: None # Arguments: 2 # ${1} - value of field to fetch from json # ${2} - Optional, no of lines to parse for the given field in 1st arg @@ -217,15 +196,63 @@ _json_value() { { [ "${3}" -gt 0 ] 2>| /dev/null && num_json_value="${3}"; } || { ! 
[ "${3}" = all ] && num_json_value=1; } # shellcheck disable=SC2086 _tmp="$(grep -o "\"${1}\"\:.*" ${no_of_lines_json_value:+-m} ${no_of_lines_json_value})" || return 1 - printf "%s\n" "${_tmp}" | sed -e "s/.*\"""${1}""\"://" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num_json_value}"p || : + printf "%s\n" "${_tmp}" | sed -e "s|.*\"""${1}""\":||" -e 's/[",]*$//' -e 's/["]*$//' -e 's/[,]*$//' -e "s/^ //" -e 's/^"//' -n -e "${num_json_value}"p || : + return 0 +} + +################################################### +# Function to parse config in format a=b +# Arguments: 2 +# ${1} - path to config file +# ${2} - optional, if true will print the config +# Input: file +# _parse_config file +# Result: all the values in the config file will be exported as variables +################################################### +_parse_config() { + _config_file_parse_config="${1:?Error: Profile config file}" + print_parse_config="${2:-false}" + + # check if the config file accessible + [ -r "${_config_file_parse_config}" ] || { + printf "%s\n" "Error: Given config file ( ${_config_file_parse_config} ) is not readable." + return 1 + } + + # Setting 'IFS' tells 'read' where to split the string. + while IFS='=' read -r key val; do + # Skip Lines starting with '#' + # Also skip lines if key and val variable is empty + { [ -n "${key}" ] && [ -n "${val}" ] && [ -n "${key##\#*}" ]; } || continue + + # trim all leading white space + key="${key#"${key%%[![:space:]]*}"}" + val="${val#"${val%%[![:space:]]*}"}" + + # trim all trailing white space + key="${key%"${key##*[![:space:]]}"}" + val="${val%"${val##*[![:space:]]}"}" + + # trim the first and last qoute if present on both sides + case "${val}" in + \"*\") val="${val#\"}" val="${val%\"}" ;; + \'*\') val="${val#\'}" val="${val%\'}" ;; + *) : ;; + esac + + # '$key' stores the key and '$val' stores the value. 
+ # Throw a warning if cannot export the variable + export "${key}=${val}" 2> /dev/null || printf "%s\n" "Warning: ${key} is not a valid variable name." + + [ "${print_parse_config}" = true ] && echo "${key}=${val}" + done < "${_config_file_parse_config}" + return 0 } ################################################### # Print a text to center interactively and fill the rest of the line with text specified. # This function is fine-tuned to this script functionality, so may appear unusual. -# Globals: 1 variable -# COLUMNS # Arguments: 4 # If ${1} = normal # ${2} = text to print @@ -243,7 +270,7 @@ _json_value() { ################################################### _print_center() { [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - term_cols_print_center="${COLUMNS}" + term_cols_print_center="${COLUMNS:-}" type_print_center="${1}" filler_print_center="" case "${type_print_center}" in normal) out_print_center="${2}" && symbol_print_center="${3}" ;; @@ -286,46 +313,28 @@ _print_center() { } ################################################### -# Quiet version of _print_center +# print_center arguments but normal print ################################################### _print_center_quiet() { - { [ $# = 3 ] && printf "%s\n" "${2}"; } || printf "%s%s\n" "${2}" "${3}" -} - -################################################### -# Evaluates value1=value2 -# Globals: None -# Arguments: 3 -# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) -# ${2} = var name -# ${3} = var value -# Result: export value1=value2 -################################################### -_set_value() { - case "${1:?}" in - d | direct) export "${2:?}=${3}" ;; - i | indirect) export "${2:?}=$(eval printf "%s" \"\$"${3}"\")" ;; - esac + { [ $# = 3 ] && printf "%s\n" "${2}"; } || + { printf "%s%s\n" "${2}" "${3}"; } } ################################################### # Check if script terminal supports ansi escapes -# Globals: 1 variable -# TERM -# Arguments: None # Result: return 1 
or 0 ################################################### _support_ansi_escapes() { unset ansi_escapes - case "${TERM}" in + case "${TERM:-}" in xterm* | rxvt* | urxvt* | linux* | vt* | screen*) ansi_escapes="true" ;; + *) : ;; esac { [ -t 2 ] && [ -n "${ansi_escapes}" ] && return 0; } || return 1 } ################################################### # Alternative to timeout command -# Globals: None # Arguments: 1 and rest # ${1} = amount of time to sleep # rest = command to execute @@ -350,7 +359,6 @@ _timeout() { ################################################### # Config updater # Incase of old value, update, for new value add. -# Globals: None # Arguments: 3 # ${1} = value name # ${2} = value @@ -361,26 +369,9 @@ _update_config() { [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 value_name_update_config="${1}" value_update_config="${2}" config_path_update_config="${3}" ! [ -f "${config_path_update_config}" ] && : >| "${config_path_update_config}" # If config file doesn't exist. 
- chmod u+w "${config_path_update_config}" || return 1 - printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name_update_config}=" "${config_path_update_config}" || :)" \ + chmod u+w -- "${config_path_update_config}" || return 1 + printf "%s\n%s\n" "$(grep -v -e "^$" -e "^${value_name_update_config}=" -- "${config_path_update_config}" || :)" \ "${value_name_update_config}=\"${value_update_config}\"" >| "${config_path_update_config}" || return 1 - chmod a-w-r-x,u+r "${config_path_update_config}" || return 1 + chmod a-w-r-x,u+r -- "${config_path_update_config}" || return 1 return 0 } - -################################################### -# Encode the given string to parse properly in network requests -# Globals: None -# Arguments: 1 -# ${1} = string -# Result: print encoded string -# Reference: -# https://stackoverflow.com/a/41405682 -################################################### -_url_encode() ( - LC_ALL=C LANG=C - awk 'BEGIN {while (y++ < 125) z[sprintf("%c", y)] = y - while (y = substr(ARGV[1], ++j, 1)) - q = y ~ /[[:alnum:]]_.!~*\47()-]/ ? q y : q sprintf("%%%02X", z[y]) - print q}' "${1}" -) diff --git a/sh/drive-utils.sh b/src/common/drive-utils.sh old mode 100755 new mode 100644 similarity index 90% rename from sh/drive-utils.sh rename to src/common/drive-utils.sh index 426b724..21c3cc5 --- a/sh/drive-utils.sh +++ b/src/common/drive-utils.sh @@ -2,9 +2,6 @@ ################################################### # Search for an existing file on gdrive with write permission. 
-# Globals: 3 variables, 2 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value # Arguments: 4 # ${1} = file name # ${2} = root dir id of file @@ -18,6 +15,7 @@ # https://developers.google.com/drive/api/v3/search-files ################################################### _check_existing_file() ( + export EXTRA_LOG CURL_PROGRESS_EXTRA API_URL API_VERSION [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 name_check_existing_file="${1}" rootdir_check_existing_file="${2}" mode_check_existing_file="${3}" param_value_check_existing_file="${4}" unset query_check_existing_file response_check_existing_file id_check_existing_file @@ -41,9 +39,6 @@ _check_existing_file() ( ################################################### # Copy/Clone a public gdrive file/folder from another/same gdrive account -# Globals: 6 variables, 6 functions -# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line, _json_escape # Arguments: 5 # ${1} = update or upload ( upload type ) # ${2} = file id to upload @@ -58,10 +53,11 @@ _check_existing_file() ( # https://developers.google.com/drive/api/v2/reference/files/copy ################################################### _clone_file() { + export DESCRIPTION_FILE CHECK_MODE SKIP_DUPLICATES QUIET API_URL API_VERSION CURL_PROGRESS [ $# -lt 5 ] && printf "Missing arguments\n" && return 1 job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" name_clone_file="${4}" size_clone_file="${5}" md5_clone_file="${6}" unset post_data_clone_file response_clone_file readable_size_clone_file description_clone_file && STRING="Cloned" - readable_size_clone_file="$(printf "%s\n" "${size_clone_file}" | _bytes_to_human)" + readable_size_clone_file="$(_bytes_to_human "${size_clone_file}")" escaped_name_clone_file="$(_json_escape j "${name_clone_file}")" 
print_name_clone_file="$(_json_escape p "${name_clone_file}")" # create description data @@ -79,6 +75,7 @@ _clone_file() { case "${CHECK_MODE}" in 2) check_value_type_clone_file="size" check_value_clone_file="${size_clone_file}" ;; 3) check_value_type_clone_file="md5Checksum" check_value_clone_file="${md5_clone_file}" ;; + *) : ;; esac # Check if file actually exists. if file_check_json_clone_file="$(_check_existing_file "${escaped_name_clone_file}" "${file_root_id_clone_file}" "${check_value_type_clone_file}" "${check_value_clone_file}")"; then @@ -121,9 +118,6 @@ _clone_file() { ################################################### # Create/Check directory in google drive. -# Globals: 3 variables, 3 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _json_escape # Arguments: 2 # ${1} = dir name # ${2} = root dir id of given dir @@ -132,6 +126,7 @@ _clone_file() { # https://developers.google.com/drive/api/v3/folder ################################################### _create_directory() { + export EXTRA_LOG CURL_PROGRESS_EXTRA API_VERSION API_URL [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 dirname_create_directory="${1##*/}" rootdir_create_directory="${2}" unset query_create_directory search_response_create_directory folder_id_create_directory @@ -162,9 +157,6 @@ _create_directory() { ################################################### # Get information for a gdrive folder/file. 
-# Globals: 3 variables, 1 function -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _json_value # Arguments: 2 # ${1} = folder/file gdrive id # ${2} = information to fetch, e.g name, id @@ -175,6 +167,7 @@ _create_directory() { # https://developers.google.com/drive/api/v3/search-files ################################################### _drive_info() { + export EXTRA_LOG CURL_PROGRESS_EXTRA API_URL API_VERSION [ $# -lt 2 ] && printf "Missing arguments\n" && return 1 folder_id_drive_info="${1}" fetch_drive_info="${2}" unset search_response_drive_info @@ -190,7 +183,6 @@ _drive_info() { ################################################### # Extract ID from a googledrive folder/file url. -# Globals: None # Arguments: 1 # ${1} = googledrive folder/file url. # Result: print extracted ID @@ -202,6 +194,7 @@ _extract_id() { *'drive.google.com'*'id='*) _tmp="${id_extract_id##*id=}" && _tmp="${_tmp%%\?*}" && id_extract_id="${_tmp%%\&*}" ;; *'drive.google.com'*'file/d/'* | 'http'*'docs.google.com'*'/d/'*) _tmp="${id_extract_id##*\/d\/}" && _tmp="${_tmp%%\/*}" && _tmp="${_tmp%%\?*}" && id_extract_id="${_tmp%%\&*}" ;; *'drive.google.com'*'drive'*'folders'*) _tmp="${id_extract_id##*\/folders\/}" && _tmp="${_tmp%%\?*}" && id_extract_id="${_tmp%%\&*}" ;; + *) : ;; esac printf "%b" "${id_extract_id:+${id_extract_id}\n}" } @@ -209,11 +202,6 @@ _extract_id() { ################################################### # Upload ( Create/Update ) files on gdrive. # Interrupted uploads can be resumed. 
-# Globals: 8 variables, 11 functions -# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN, DESCRIPTION_FILE -# Functions - _url_encode, _json_value, _json_escape, _print_center, _bytes_to_human, _check_existing_file -# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session -# _full_upload, _collect_file_info # Arguments: 3 # ${1} = update or upload ( upload type ) # ${2} = file to upload @@ -227,6 +215,7 @@ _extract_id() { # https://developers.google.com/drive/api/v3/reference/files/update ################################################### _upload_file() { + export QUIET DESCRIPTION_FILE CHECK_MODE SKIP_DUPLICATES API_URL API_VERSION INFO_PATH [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 job_upload_file="${1}" input_upload_file="${2}" folder_id_upload_file="${3}" unset slug_upload_file inputname_upload_file extension_upload_file inputsize_upload_file readable_size_upload_file request_method_upload_file \ @@ -238,7 +227,7 @@ _upload_file() { inputname_upload_file="${slug_upload_file%.*}" extension_upload_file="${slug_upload_file##*.}" inputsize_upload_file="$(($(wc -c < "${input_upload_file}")))" && content_length_upload_file="${inputsize_upload_file}" - readable_size_upload_file="$(printf "%s\n" "${inputsize_upload_file}" | _bytes_to_human)" + readable_size_upload_file="$(_bytes_to_human "${inputsize_upload_file}")" # Handle extension-less files [ "${inputname_upload_file}" = "${extension_upload_file}" ] && { @@ -269,14 +258,15 @@ _upload_file() { } check_value_upload_file="${check_value_upload_file%% *}" ;; + *) : ;; esac # Check if file actually exists, and create if not. 
if file_check_json_upload_file="$(_check_existing_file "${escaped_slug_upload_file}" "${folder_id_upload_file}" "${check_value_type_upload_file}" "${check_value_upload_file}")"; then if [ -n "${SKIP_DUPLICATES}" ]; then # Stop upload if already exists ( -d/--skip-duplicates ) _collect_file_info "${file_check_json_upload_file}" "${print_slug_upload_file}" || return 1 - _clear_line 1 - "${QUIET:-_print_center}" "justify" "${print_slug_upload_file} already exists." "=" && return 0 + STRING="Skipped" _normal_logging_upload + return 0 else request_method_upload_file="PATCH" _file_id_upload_file="$(printf "%s\n" "${file_check_json_upload_file}" | _json_value id 1 1)" || @@ -310,7 +300,7 @@ _upload_file() { uploaded_range_upload_file="$(raw_upload_file="$(curl --compressed -s -X PUT \ -H "Content-Range: bytes */${content_length_upload_file}" \ --url "${uploadlink_upload_file}" --globoff -D - || :)" && - printf "%s\n" "${raw_upload_file##*[R,r]ange: bytes=0-}" | while read -r line; do printf "%s\n" "${line%%$(printf '\r')}" && break; done)" + printf "%s\n" "${raw_upload_file##*[R,r]ange: bytes=0-}" | while read -r line; do printf "%s\n" "${line%%"$(printf '\r')"}" && break; done)" if [ "${uploaded_range_upload_file}" -gt 0 ] 2>| /dev/null; then _print_center "justify" "Resuming interrupted upload.." "-" && _newline "\n" content_range_upload_file="$(printf "bytes %s-%s/%s\n" "$((uploaded_range_upload_file + 1))" "$((inputsize_upload_file - 1))" "${inputsize_upload_file}")" @@ -334,6 +324,7 @@ _upload_file() { _normal_logging_upload _remove_upload_session ;; + *) : ;; esac else _full_upload || return 1 @@ -345,20 +336,20 @@ _upload_file() { # Sub functions for _upload_file function - Start # generate resumable upload link _generate_upload_link() { - "${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2 - uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \ + "${EXTRA_LOG:-}" "justify" "Generating upload link.." 
"-" 1>&2 + uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA:-}" \ -X "${request_method_upload_file}" \ -H "Content-Type: application/json; charset=UTF-8" \ -H "X-Upload-Content-Type: ${mime_type_upload_file}" \ -H "X-Upload-Content-Length: ${inputsize_upload_file}" \ - -d "$postdata_upload_file" \ + -d "${postdata_upload_file}" \ "${url_upload_file}" \ -D - || :)" && _clear_line 1 1>&2 _clear_line 1 1>&2 case "${uploadlink_upload_file}" in - *'ocation: '*'upload_id'*) uploadlink_upload_file="$(printf "%s\n" "${uploadlink_upload_file##*[L,l]ocation: }" | while read -r line; do printf "%s\n" "${line%%$(printf '\r')}" && break; done)" && return 0 ;; - '' | *) return 1 ;; + *'ocation: '*'upload_id'*) uploadlink_upload_file="$(printf "%s\n" "${uploadlink_upload_file##*[L,l]ocation: }" | while read -r line; do printf "%s\n" "${line%%"$(printf '\r')"}" && break; done)" && return 0 ;; + *) return 1 ;; esac return 0 @@ -367,8 +358,8 @@ _generate_upload_link() { # Curl command to push the file to google drive. _upload_file_from_uri() { _print_center "justify" "Uploading.." "-" - # shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. - upload_body_upload_file="$(_api_request ${CURL_PROGRESS} \ + # shellcheck disable=SC2086,SC2248 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic. 
+ upload_body_upload_file="$(_api_request ${CURL_PROGRESS:-} \ -X PUT \ -H "Content-Type: ${mime_type_upload_file}" \ -H "Content-Length: ${content_length_upload_file}" \ @@ -377,16 +368,16 @@ _upload_file_from_uri() { -o- \ --url "${uploadlink_upload_file}" \ --globoff \ - ${CURL_SPEED} ${resume_args1_upload_file} ${resume_args2_upload_file} \ + ${CURL_SPEED:-} ${resume_args1_upload_file:-} ${resume_args2_upload_file:-} \ -H "${resume_args3_upload_file}" || :)" - [ -z "${VERBOSE_PROGRESS}" ] && for _ in 1 2; do _clear_line 1; done && "${1:-:}" + [ -z "${VERBOSE_PROGRESS:-}" ] && for _ in 1 2; do _clear_line 1; done && "${1:-:}" return 0 } # logging in case of successful upload _normal_logging_upload() { - [ -z "${VERBOSE_PROGRESS}" ] && _clear_line 1 - "${QUIET:-_print_center}" "justify" "${slug_upload_file} " "| ${readable_size_upload_file} | ${STRING}" "=" + [ -z "${VERBOSE_PROGRESS:-}" ] && _clear_line 1 + "${QUIET:-_print_center}" "justify" "${slug_upload_file} " "| ${readable_size_upload_file} | ${STRING:-}" "=" return 0 } @@ -417,9 +408,6 @@ _full_upload() { ################################################### # Share a gdrive file/folder -# Globals: 3 variables, 4 functions -# Variables - API_URL, API_VERSION, ACCESS_TOKEN -# Functions - _url_encode, _json_value, _print_center, _clear_line # Arguments: 2 # ${1} = gdrive ID of folder/file # ${2} = Email to which file will be shared ( optional ) diff --git a/src/common/parser.sh b/src/common/parser.sh new file mode 100644 index 0000000..34e0ffd --- /dev/null +++ b/src/common/parser.sh @@ -0,0 +1,416 @@ +#!/usr/bin/env sh +# shellcheck source=/dev/null + +################################################### +# check if the given fd is open +# Arguments: +# ${1} = fd number +# return 1 or 0 +################################################### +_is_fd_open() { + for fd in ${1:?}; do + # shellcheck disable=SC3021 + if ! { true >&"${fd}"; } 2<> /dev/null; then + printf "%s\n" "Error: fd ${fd} not open." 
+ return 1 + fi + done +} + +################################################### +# append help text to all the help variable +################################################### +_parser_add_help() { + # append current flag help content to _PARSER_ALL_HELP + _PARSER_ALL_HELP="${_PARSER_ALL_HELP} +${__PARSER_BAR:-} +${1:-}" 2>| /dev/null + # redirect to /dev/null as this will spam horribly in -x mode +} + +################################################### +# check whether the given flag has been provided the required num of arguments +# to be used within flag functions +# Arguments: +# ${1} = num of args +# ${2} = all of the args +# return 0 or 1 with usage for the flag +################################################### +_parser_check_arguments() { + nargs_parser_check_arguments="$((${1:?_parser_check_arguments}))" + # because first argument is num of args and second is the flag itself + num_parser_check_arguments=$(($# - 2)) + + [ "${num_parser_check_arguments}" -lt "${nargs_parser_check_arguments}" ] && { + printf "%s\n" "${0##*/}: ${2}: flag requires ${nargs_parser_check_arguments} argument."
+ printf "\n%s\n" "Help:" + # print help for the respective flag + printf "%s\n" "$(_usage "${2}")" + exit 1 + } + return 0 +} + +################################################### +# check if the given flag exists and set function name to a variable +# Arguments: +# ${1} = flag +# ${2} = var which will be set to function name +# example: +# input: -p var1 +# output: +# set var1 to "__flag_p" +################################################### +_flag_exists() { + tmp_flag_exists="" option_flag_exists="" + # use _flag_help function to get the help contents and function name + _flag_help "${1:?}" tmp_flag_exists option_flag_exists + # then check if help is empty or not + [ -z "${tmp_flag_exists}" ] && return 1 + _set_value d "${2:?}" "${option_flag_exists}" +} + +################################################### +# fetch flag help and set the contents to a variable +# Arguments: +# ${1} = flag +# ${2} = var which will be set to help contents +# ${3} = optional, var which will be set to flag name without dashes +# example: +# input: -p var1 var2 +# output: +# set var1 to "${_parser__help_p}" +# set var2 to p +################################################### +_flag_help() { + flag_flag_help="" + # remove the dashes from the flags + _trim "-" "${1:?_flag_help}" flag_flag_help + _set_value i "${2:?_flag_help}" "_parser__help_${flag_flag_help}" + _set_value d "${3:-_}" "${flag_flag_help}" +} + +################################################### +# parse the given arguments as flags or normal input +# Arguments: +# ${1} = function which will be executed to setup the flags +# ${@} = the inputs to process +# example: _parse_arguments setup_flags 1 2 3 +################################################### +_parse_arguments() { + __NEWLINE=" +" + + # Check if script terminal supports ansi escapes | Result: return 1 or 0 + _parse_support_ansi_escapes() { + case "${TERM}" in + xterm* | rxvt* | urxvt* | linux* | vt* | screen*) { [ -t 2 ] && return 0; } || return 1 ;; + *) : ;;
+ esac + { [ -t 2 ] && return 0; } || return 1 + } + # fetch column size and check if greater than the num ( see in function) | return 1 or 0 + _parser_required_column_size() { + COLUMNS="$({ command -v bash 1>| /dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } || + { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } || + { command -v stty 1>| /dev/null && _tmp="$(stty size)" && printf "%s\n" "${_tmp##* }"; } || + { command -v tput 1>| /dev/null && tput cols; })" || : + + [ "$((COLUMNS))" -gt 45 ] && return 0 + } + + # check if running in terminal and support ansi escape sequences + _parse_support_ansi_escapes && + _parser_required_column_size && + __PARSER_BAR="$( + filler='' symbol='_' + i=1 && while [ "${i}" -le "${COLUMNS}" ]; do + filler="${filler}${symbol}" && i="$((i + 1))" + done + printf "%s\n" "${filler}" + )" + + # export a __PARSER_BAR variable which is used in _add_flag function + __PARSER_BAR="${__PARSER_BAR:+${__PARSER_BAR}${__NEWLINE}}" + # just a variable containing a newline + + ########################## + # these global variables are actually used when _parser_setup_flags is running + # _PARSER_ALL_HELP contains all the help + # _PARSER_ARGS_SHIFT contains the num of shift to be done for each arg + # _PARSER_PREPROCESS_FUNCTION contains preprocess function contents + unset _PARSER_ALL_HELP _PARSER_ARGS_SHIFT _PARSER_PREPROCESS_FUNCTION + # these flags are exported in _parser_setup_flag + unset _PARSER_FLAGS _PARSER_CURRENT_FLAGS _PARSER_CURRENT_NARGS _PARSER_CURRENT_ARGS _PARSER_CURRENT_ARGS_TYPE + ########################## + + # this will initialize help text and flag functions + "${1:?_parse_arguments - 1: Missing funtion name to setup flags}" || return 1 + shift 2>| /dev/null + + # run the code required to run before parsing the arguments + _parser_run_preprocess || return 1 + + # TODO: remove usage of shift + while [ "${#}" -gt 0 ]; do + case "${1}" in + # just ignore empty 
inputs + '') : ;; + --) + shift + while [ "${#}" -gt 0 ]; do + _parser_process_input "${@}" || return 1 + shift + done + ;; + -*) + flag_parse_arguments="" + if _flag_exists "${1}" flag_parse_arguments; then + "_parser_process_${flag_parse_arguments}" "${@}" || return 1 + else + printf "%s\n\n" "${0##*/}: ${1}: Unknown option" + _short_help + fi + ;; + # anything not starting with - is added to be processed later + *) + _parser_process_input "${@}" || return 1 + ;; + esac + # add 1 shift for the current argument + _PARSER_ARGS_SHIFT="$((_PARSER_ARGS_SHIFT + 1))" + # now shift the arguments + shift "${_PARSER_ARGS_SHIFT}" + # reset the shift + _PARSER_ARGS_SHIFT="0" + done + return 0 +} + +################################################### +# Remove the dashes from flags and set some global vars +# Arguments: +# ${1} = flags separated by space +# ${2} = num of args required by the flag +# ${3} = optional -> argument type - optional or required +# ${4} = optional -> argument help text +# example: +# input = "-p --parallel" 1 required "no of parallel downloads" +################################################### +_parser_setup_flag() { + _PARSER_CURRENT_FLAGS="" tmp_parser_setup_flag="" + _PARSER_FLAGS="${1:?_parser_setup_flag}" + for f in ${_PARSER_FLAGS}; do + _trim "-" "${f}" tmp_parser_setup_flag + _PARSER_CURRENT_FLAGS="${_PARSER_CURRENT_FLAGS} ${tmp_parser_setup_flag}" + done + _PARSER_CURRENT_NARGS="${2:?_parser_setup_flag}" + _PARSER_CURRENT_ARGS_TYPE="${3}" + _PARSER_CURRENT_ARGS="${4}" +} + +################################################### +# set flag help variable +# uses global variables exported in _parser_setup_flag function +# Arguments: +# ${1} = help contents +# set _parser__help_${flag_name} variable with help contents +# example: assuming "-p --parallel-jobs" 1 required "no of parallel downloads" was given to _parser_setup_flag +# input: _parser_setup_flag_help "Download multiple files in parallel."
+# output: set _parser__help_p and _parser__help_paralleljobs +# help text: +# -p | --parallel-jobs "num of parallel downloads" [ Required ] +# +# Download multiple files in parallel. +################################################### +_parser_setup_flag_help() { + flags_parser_setup_flag_help="${_PARSER_CURRENT_FLAGS:?_parser_setup_flag_help}" + nargs_parser_setup_flag_help="${_PARSER_CURRENT_NARGS:?_parser_setup_flag_help}" + unset start_parser_setup_flag_help \ + help_parser_setup_flag_help \ + arg_parser_setup_flag_help \ + all_parser_setup_flag_help + + # run loop to add the indentation + while IFS= read -r line <&4; do + # 8 spaces + help_parser_setup_flag_help="${help_parser_setup_flag_help} + ${line}" + done 4<< EOF +${1:?_parser_setup_flag_help} +EOF + + # add as a prefix on first help line + for f in ${_PARSER_FLAGS:?_parser_setup_flag_help}; do + # format as -p | --parallel + start_parser_setup_flag_help="${start_parser_setup_flag_help:+${start_parser_setup_flag_help} | }${f}" + done + + # check if to add argument help + if ! 
[ "${nargs_parser_setup_flag_help}" = 0 ]; then + # argument help should be inside double qoutes + arg_parser_setup_flag_help="\"${_PARSER_CURRENT_ARGS:?_parser_setup_flag_help}\"" + # check if to add optional or required string + # -p | --parallel-jobs "num of parallel downloads" [ Required ] + if [ "${_PARSER_CURRENT_ARGS_TYPE}" = optional ]; then + arg_parser_setup_flag_help="${arg_parser_setup_flag_help} [ Optional ]" + else + arg_parser_setup_flag_help="${arg_parser_setup_flag_help} [ Required ]" + fi + fi + + # add argument help to help, prepend 4 spaces + start_parser_setup_flag_help=" ${start_parser_setup_flag_help} ${arg_parser_setup_flag_help}" + + # concatenate all the help text + all_setup_help_flag="${start_parser_setup_flag_help}${__NEWLINE:?}${help_parser_setup_flag_help}" + + for f in ${flags_parser_setup_flag_help}; do + # create _parser__help_p or _parser__help_paralleljobs var containing "help contents" + _set_value d "_parser__help_${f}" "${all_setup_help_flag}" + done + + # don't add to help of when given flag is input + [ "${_PARSER_FLAGS}" = input ] && return 0 + + # append current flag help content to _PARSER_ALL_HELP + _PARSER_ALL_HELP="${_PARSER_ALL_HELP} +${__PARSER_BAR:-} +${all_setup_help_flag}" 2>| /dev/null + # redirect to /dev/null as this will spam horribly in -x mode +} + +################################################### +# append the given input to _PARSER_PREPROCESS_FUNCTION +# will be executed before parsing the arguments in _parser_run_preprocess function +# Arguments: +# 4< num of shifts to do +################################################### +_parser_shift() { + export _PARSER_ARGS_SHIFT="${1:-1}" +} + +_short_help() { + printf "No valid arguments provided, use -h/--help flag to see usage.\n" + exit 0 +} + +################################################### +# Evaluates value1=value2 +# Arguments: 3 +# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) +# ${2} = var name +# ${3} = var value +# Result: export 
value1=value2 +################################################### +_set_value() { + case "${1:?}" in + d | direct) export "${2:?}=${3}" ;; + i | indirect) eval export "${2}"=\"\$"${3}"\" ;; + *) return 1 ;; + esac +} + +################################################### +# remove the given character from the given string +# 1st arg - character +# 2nd arg - string +# 3rd arg - var where to save the output +# print trimmed string if 3rd arg empty else set +# Reference: https://stackoverflow.com/a/65350253 +################################################### +_trim() { + char_trim="${1}" str_trim="${2}" var_trim="${3}" + # Disable globbing. + # This ensures that the word-splitting is safe. + set -f + # store old ifs, restore it later. + old_ifs="${IFS}" + IFS="${char_trim}" + # shellcheck disable=SC2086 + set -- ${str_trim} + IFS= + if [ -n "${var_trim}" ]; then + _set_value d "${var_trim}" "$*" + else + printf "%s" "$*" + fi + # Restore the value of 'IFS'. + IFS="${old_ifs}" + # re enable globbing + set +f +} diff --git a/src/common/sync-flags.sh b/src/common/sync-flags.sh new file mode 100644 index 0000000..0617957 --- /dev/null +++ b/src/common/sync-flags.sh @@ -0,0 +1 @@ +#!/usr/bin/env sh diff --git a/src/common/update.sh b/src/common/update.sh new file mode 100644 index 0000000..f571e92 --- /dev/null +++ b/src/common/update.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env sh + +################################################### +# Automatic updater, only update if script is installed system wide. 
+# Result: On +# Update if AUTO_UPDATE_INTERVAL + LAST_UPDATE_TIME less than printf "%(%s)T\\n" "-1" +################################################### +_auto_update() { + export COMMAND_NAME INSTALL_PATH TYPE TYPE_VALUE REPO LAST_UPDATE_TIME AUTO_UPDATE_INTERVAL + command -v "${COMMAND_NAME}" 1> /dev/null && + if [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then + current_time="$(_epoch)" + [ "$((LAST_UPDATE_TIME + AUTO_UPDATE_INTERVAL))" -lt "$(_epoch)" ] && _update update + _update_value LAST_UPDATE_TIME "${current_time}" + fi + return 0 +} + +################################################### +# Install/Update/uninstall the script. +# Arguments: 1 +# ${1} = uninstall or update +# Result: On +# ${1} = nothing - Update the script if installed, otherwise install. +# ${1} = uninstall - uninstall the script +################################################### +_update() { + job_update="${1:-update}" + [ "${GLOBAL_INSTALL:-}" = true ] && ! [ "$(id -u)" = 0 ] && printf "%s\n" "Error: Need root access to update." && return 0 + [ "${job_update}" = uninstall ] && job_uninstall="--uninstall" + _print_center "justify" "Fetching ${job_update} script.." "-" + repo_update="${REPO:-labbots/google-drive-upload}" type_value_update="${TYPE_VALUE:-latest}" cmd_update="${COMMAND_NAME:-gupload}" path_update="${INSTALL_PATH:-${HOME}/.gdrive-downloader/bin}" + { [ "${TYPE:-}" != branch ] && type_value_update="$(_get_latest_sha release "${type_value_update}" "${repo_update}")"; } || : + if script_update="$(curl --compressed -Ls "https://github.com/${repo_update}/raw/${type_value_update}/install.sh")"; then + _clear_line 1 + + # check if the downloaded script has any syntax errors + printf "%s\n" "${script_update}" | sh -n || { + printf "%s\n" "Install script downloaded but malformed, try again and if the issue persists open an issue on github." 
+ return 1 + } + # shellcheck disable=SC2248 + printf "%s\n" "${script_update}" | sh -s -- ${job_uninstall:-} --skip-internet-check --cmd "${cmd_update}" --path "${path_update}" + current_time="$(date +'%s')" + [ -z "${job_uninstall}" ] && _update_value LAST_UPDATE_TIME "${current_time}" + else + _clear_line 1 + "${QUIET:-_print_center}" "justify" "Error: Cannot download" " ${job_update} script." "=" 1>&2 + return 1 + fi + return 0 +} + +################################################### +# Update in-script values +################################################### +_update_value() { + command_path="${INSTALL_PATH:?}/${COMMAND_NAME:?}" + value_name="${1:?}" value="${2:-}" + script_without_value_and_shebang="$(grep -v "${value_name}=\".*\".* # added values" -- "${command_path}" | sed 1d)" + new_script="$( + sed -n 1p -- "${command_path}" + printf "%s\n" "${value_name}=\"${value}\" # added values" + printf "%s\n" "${script_without_value_and_shebang}" + )" + # check if the downloaded script has any syntax errors + printf "%s\n" "${new_script}" | "${INSTALLATION:-bash}" -n || { + printf "%s\n" "Update downloaded but malformed, try again and if the issue persists open an issue on github." + return 1 + } + chmod u+w -- "${command_path}" && printf "%s\n" "${new_script}" >| "${command_path}" && chmod "a-w-r-x,${PERM_MODE:-u}+r+x" -- "${command_path}" + return 0 +} diff --git a/sh/upload.sh b/src/common/upload-common.sh old mode 100755 new mode 100644 similarity index 53% rename from sh/upload.sh rename to src/common/upload-common.sh index 70ada8b..31d7d9a --- a/sh/upload.sh +++ b/src/common/upload-common.sh @@ -1,91 +1,12 @@ #!/usr/bin/env sh -# Upload a file to Google Drive +# helper functions related to main upload script # shellcheck source=/dev/null -_usage() { - printf "%b" " -The script can be used to upload file/directory to google drive.\n -Usage:\n ${0##*/} [options.. ] \n -Foldername argument is optional. 
If not provided, the file will be uploaded to preconfigured google drive.\n -File name argument is optional if create directory option is used.\n -Options:\n - -a | --account 'account name' - Use different account than the default one.\n - To change the default account name, use this format, -a/--account default=account_name\n - -la | --list-accounts - Print all configured accounts in the config files.\n - -ca | --create-account 'account name' - To create a new account with the given name if does not already exists.\n - -da | --delete-account 'account name' - To delete an account information from config file. \n - -c | -C | --create-dir - option to create directory. Will provide folder id. Can be used to provide input folder, see README.\n - -r | --root-dir or - google folder ID/URL to which the file/directory is going to upload. - If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url\n - -s | --skip-subdirs - Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads.\n - -p | --parallel - Upload multiple files in parallel, Max value = 10.\n - -f | --[file|folder] - Specify files and folders explicitly in one command, use multiple times for multiple folder/files. See README for more use of this command.\n - -cl | --clone - Upload a gdrive file without downloading, require accessible gdrive link or id as argument.\n - -o | --overwrite - Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders.\n - -d | --skip-duplicates - Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders.\n - -cm | --check-mode - Additional flag for --overwrite and --skip-duplicates flag. 
Can be used to change check mode in those flags, available args are 'size' and 'md5'.\n - -desc | --description | --description-all - Specify description for the given file. To use the respective metadata of a file, below is the format:\n - File name ( fullname ): %f | Size: %s | Mime Type: %m\n - Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m'\n - Note: For files inside folders, use --description-all flag.\n - -S | --share - Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link.\n - -SM | -sm | --share-mode 'share mode' - Specify the share mode for sharing file.\n - Share modes are: r / reader - Read only permission.\n - : w / writer - Read and write permission.\n - : c / commenter - Comment only permission.\n - Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is neccessary.\n - --speed 'speed' - Limit the download speed, supported formats: 1K, 1M and 1G.\n - -i | --save-info - Save uploaded files info to the given filename.\n - -z | --config - Override default config file with custom config file.\nIf you want to change default value, then use this format -z/--config default=default=your_config_file_path.\n - -q | --quiet - Supress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders.\n - -R | --retry 'num of retries' - Retry the file upload if it fails, postive integer as argument. Currently only for file uploads.\n - -in | --include 'pattern' - Only include the files with the given pattern to upload - Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n - -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading. 
- Applicable for folder uploads.\n - e.g: ${0##*/} local_folder --exclude "*1*", will exclude all the files pattern '1' in the name.\n - --hide - This flag will prevent the script to print sensitive information like root folder id and drivelink.\n - -v | --verbose - Display detailed message (only for non-parallel uploads).\n - -V | --verbose-progress - Display detailed message and detailed upload progress(only for non-parallel uploads).\n - --skip-internet-check - Do not check for internet connection, recommended to use in sync jobs.\n - $([ "${GUPLOAD_INSTALLED_WITH}" = script ] && printf '%s\n' '\n -u | --update - Update the installed script in your system.\n - -U | --uninstall - Uninstall script, remove related files.\n') - --info - Show detailed info, only if script is installed system wide.\n - -D | --debug - Display script command trace.\n - -h | --help - Display this message.\n" - exit 0 -} - -_short_help() { - printf "No valid arguments provided, use -h/--help flag to see usage.\n" - exit 0 -} - -################################################### -# Print the contents of info file if scipt is installed system wide. -# Path is INFO_FILE="${HOME}/.google-drive-upload/google-drive-upload.info" -# Globals: 1 variable -# INFO_FILE -# Arguments: None -# Result: read description -################################################### -_version_info() { - if command -v "${COMMAND_NAME}" 1> /dev/null && [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then - for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do - printf "%s\n" "${i}=\"$(eval printf "%s" \"\$"${i}"\")\"" - done | sed -e "s/=/: /g" - else - printf "%s\n" "google-drive-upload is not installed system wide." 
- fi - exit 0 -} - ################################################### # Function to cleanup config file # Remove invalid access tokens on the basis of corresponding expiry -# Globals: None # Arguments: 1 # ${1} = config file -# Result: read description ################################################### _cleanup_config() { config="${1:?Error: Missing config}" && unset values_regex _tmp @@ -97,23 +18,20 @@ _cleanup_config() { token_value_name="${expiry_value_name%%_EXPIRY}" _tmp="${line##*=}" && _tmp="${_tmp%\"}" && expiry="${_tmp#\"}" - [ "${expiry}" -le "$(date +"%s")" ] && + [ "${expiry}" -le "$(_epoch)" ] && values_regex="${values_regex:+${values_regex}|}${expiry_value_name}=\".*\"|${token_value_name}=\".*\"" done 4<< EOF -$(grep -F ACCESS_TOKEN_EXPIRY "${config}" || :) +$(grep -F ACCESS_TOKEN_EXPIRY -- "${config}" || :) EOF - chmod u+w "${config}" && - printf "%s\n" "$(grep -Ev "^\$${values_regex:+|${values_regex}}" "${config}")" >| "${config}" && - chmod "a-w-r-x,u+r" "${config}" + chmod u+w -- "${config}" && + printf "%s\n" "$(grep -Ev "^\$${values_regex:+|${values_regex}}" -- "${config}")" >| "${config}" && + chmod "a-w-r-x,u+r" -- "${config}" return 0 } ################################################### # Process all arguments given to the script -# Globals: 1 variable, 1 function -# Variable - HOME -# Functions - _short_help # Arguments: Many # ${@}" = Flags with argument and file/folder input # Result: On @@ -126,10 +44,7 @@ _setup_arguments() { [ $# = 0 ] && printf "Missing arguments\n" && return 1 # Internal variables # De-initialize if any variables set already. 
- unset LIST_ACCOUNTS UPDATE_DEFAULT_ACCOUNT CUSTOM_ACCOUNT_NAME NEW_ACCOUNT_NAME DELETE_ACCOUNT_NAME ACCOUNT_ONLY_RUN - unset FOLDERNAME FINAL_LOCAL_INPUT_ARRAY FINAL_ID_INPUT_ARRAY CONTINUE_WITH_NO_INPUT - unset PARALLEL NO_OF_PARALLEL_JOBS SHARE SHARE_EMAIL SHARE_ROLE OVERWRITE SKIP_DUPLICATES CHECK_MODE SKIP_SUBDIRS DESCRIPTION ROOTDIR QUIET - unset VERBOSE VERBOSE_PROGRESS DEBUG LOG_FILE_ID CURL_SPEED RETRY + unset CONTINUE_WITH_NO_INPUT export CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s" INFO_PATH="${HOME}/.google-drive-upload" CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath" [ -f "${CONFIG_INFO}" ] && . "${CONFIG_INFO}" @@ -143,187 +58,7 @@ _setup_arguments() { REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob" \ TOKEN_URL="https://accounts.google.com/o/oauth2/token" - _check_config() { - [ -z "${1##default=*}" ] && export UPDATE_DEFAULT_CONFIG="_update_config" - { [ -r "${2}" ] && CONFIG="${2}"; } || { - printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1 - } - return 0 - } - - _check_longoptions() { - [ -z "${2}" ] && - printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && - exit 1 - return 0 - } - - while [ $# -gt 0 ]; do - case "${1}" in - -h | --help) _usage ;; - -D | --debug) DEBUG="true" && export DEBUG ;; - --info) _version_info ;; - -a | --account) - _check_longoptions "${1}" "${2}" - export CUSTOM_ACCOUNT_NAME="${2##default=}" && shift - [ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ACCOUNT="_update_config" - ;; - -la | --list-accounts) export LIST_ACCOUNTS="true" ;; - # this flag is preferred over --account - -ca | --create-account) - _check_longoptions "${1}" "${2}" - export NEW_ACCOUNT_NAME="${2}" && shift - ;; - -da | --delete-account) - _check_longoptions "${1}" "${2}" - export DELETE_ACCOUNT_NAME="${2}" && shift - ;; - -c | -C | --create-dir) - _check_longoptions "${1}" "${2}" - FOLDERNAME="${2}" && shift - ;; - -r | 
--root-dir) - _check_longoptions "${1}" "${2}" - ROOTDIR="${2##default=}" - [ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ROOTDIR="_update_config" - shift - ;; - -z | --config) - _check_longoptions "${1}" "${2}" - _check_config "${2}" "${2##default=}" - shift - ;; - -i | --save-info) - _check_longoptions "${1}" "${2}" - export LOG_FILE_ID="${2}" && shift - ;; - -s | --skip-subdirs) export SKIP_SUBDIRS="true" ;; - -p | --parallel) - _check_longoptions "${1}" "${2}" - NO_OF_PARALLEL_JOBS="${2}" - if [ "$((NO_OF_PARALLEL_JOBS))" -gt 0 ] 2>| /dev/null 1>&2; then - NO_OF_PARALLEL_JOBS="$((NO_OF_PARALLEL_JOBS > 10 ? 10 : NO_OF_PARALLEL_JOBS))" - else - printf "\nError: -p/--parallel value ranges between 1 to 10.\n" - exit 1 - fi - export PARALLEL_UPLOAD="parallel" && shift - ;; - -o | --overwrite) export OVERWRITE="Overwrite" UPLOAD_MODE="update" ;; - -d | --skip-duplicates) export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update" ;; - -cm | --check-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - size) export CHECK_MODE="2" && shift ;; - md5) export CHECK_MODE="3" && shift ;; - *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; - esac - ;; - -desc | --description | --description-all) - _check_longoptions "${1}" "${2}" - [ "${1}" = "--description-all" ] && export DESCRIPTION_ALL="true" - export DESCRIPTION="${2}" && shift - ;; - -f | --file | --folder) - _check_longoptions "${1}" "${2}" - LOCAL_INPUT_ARRAY="${LOCAL_INPUT_ARRAY} - ${2}" && shift - ;; - -cl | --clone) - _check_longoptions "${1}" "${2}" - FINAL_ID_INPUT_ARRAY="${FINAL_ID_INPUT_ARRAY} - $(_extract_id "${2}")" && shift - ;; - -S | --share) - SHARE="_share_id" - EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" - case "${2}" in - -* | '') : ;; - *) - if printf "%s\n" "${2}" | grep -qE "${EMAIL_REGEX}"; then - SHARE_EMAIL="${2}" && shift && export SHARE_EMAIL - fi - ;; 
- esac - SHARE_ROLE="${SHARE_ROLE:-reader}" - ;; - -[Ss][Mm] | --share-mode) - _check_longoptions "${1}" "${2}" - case "${2}" in - r | read*) SHARE_ROLE="reader" ;; - w | write*) SHARE_ROLE="writer" ;; - c | comment*) SHARE_ROLE="commenter" ;; - *) - printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." && - exit 1 - ;; - esac - SHARE="_share_id" - shift - ;; - --speed) - _check_longoptions "${1}" "${2}" - regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$' - if printf "%s\n" "${2}" | grep -qE "${regex}"; then - export CURL_SPEED="--limit-rate ${2}" && shift - else - printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2 - exit 1 - fi - ;; - -R | --retry) - _check_longoptions "${1}" "${2}" - if [ "$((2))" -gt 0 ] 2>| /dev/null 1>&2; then - export RETRY="${2}" && shift - else - printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n" - exit 1 - fi - ;; - -in | --include) - _check_longoptions "${1}" "${2}" - INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && shift - ;; - -ex | --exclude) - _check_longoptions "${1}" "${2}" - EXCLUDE_FILES="${EXCLUDE_FILES} ! 
-name '${2}' " && shift - ;; - --hide) HIDE_INFO=":" ;; - -q | --quiet) export QUIET="_print_center_quiet" ;; - -v | --verbose) export VERBOSE="true" ;; - -V | --verbose-progress) export VERBOSE_PROGRESS="true" ;; - --skip-internet-check) export SKIP_INTERNET_CHECK=":" ;; - '') shorthelp ;; - *) # Check if user meant it to be a flag - if [ -z "${1##-*}" ]; then - [ "${GUPLOAD_INSTALLED_WITH}" = script ] && { - case "${1}" in - -u | --update) - _check_debug && _update && { exit 0 || exit 1; } - ;; - --uninstall) - _check_debug && _update uninstall && { exit 0 || exit 1; } - ;; - esac - } - printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1 - else - case "${1}" in - *drive.google.com* | *docs.google.com*) - FINAL_ID_INPUT_ARRAY="${FINAL_ID_INPUT_ARRAY} - $(_extract_id "${1}")" - ;; - *) - LOCAL_INPUT_ARRAY="${LOCAL_INPUT_ARRAY} - ${1}" - ;; - esac - fi - ;; - esac - shift - done - + _parse_arguments "_parser_setup_flags" "${@}" || return 1 _check_debug [ -n "${VERBOSE_PROGRESS}" ] && unset VERBOSE && export CURL_PROGRESS="" @@ -332,6 +67,7 @@ _setup_arguments() { # create info path folder, can be missing if gupload was not installed with install.sh mkdir -p "${INFO_PATH}" || return 1 + # post processing for --account, --delete-account, --create-acount and --list-accounts # handle account related flags here as we want to use the flags independenlty even with no normal valid inputs # delete account, --delete-account flag # TODO: add support for deleting multiple accounts @@ -339,17 +75,8 @@ _setup_arguments() { # list all configured accounts, --list-accounts flag [ -n "${LIST_ACCOUNTS}" ] && _all_accounts - # If no input, then check if -C option was used or not. 
- # check if given input exists ( file/folder ) - FINAL_LOCAL_INPUT_ARRAY="$(printf "%s\n" "${LOCAL_INPUT_ARRAY}" | while read -r input && { [ -n "${input}" ] || continue; }; do - { [ -r "${input}" ] && printf "%s\n" "${input}"; } || { - { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid Input - ${input} ]" "=" && printf "\n"; } 1>&2 - continue - } - done)" - # If no input, then check if either -C option was used. - [ -z "${FINAL_LOCAL_INPUT_ARRAY:-${FINAL_ID_INPUT_ARRAY:-${FOLDERNAME}}}" ] && { + [ -z "${INPUT_FILE_1:-${INPUT_ID_1:-${FOLDERNAME}}}" ] && { # if any account related option was used then don't show short help [ -z "${DELETE_ACCOUNT_NAME:-${LIST_ACCOUNTS:-${NEW_ACCOUNT_NAME}}}" ] && _short_help # exit right away if --list-accounts or --delete-account flag was used @@ -364,18 +91,65 @@ _setup_arguments() { case "${SKIP_DUPLICATES:-${OVERWRITE}}" in "Overwrite") export CHECK_MODE="1" ;; "Skip Existing") export CHECK_MODE="2" ;; + *) : ;; esac } return 0 } +# setup cleanup after exit using traps +_setup_traps() { + export SUPPORT_ANSI_ESCAPES TMPFILE ACCESS_TOKEN ACCESS_TOKEN_EXPIRY INITIAL_ACCESS_TOKEN ACCOUNT_NAME CONFIG ACCESS_TOKEN_SERVICE_PID + _cleanup() { + # unhide the cursor if hidden + [ -n "${SUPPORT_ANSI_ESCAPES}" ] && printf "\033[?25h\033[?7h" + { + # update the config with latest ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY only if changed + [ -f "${TMPFILE}_ACCESS_TOKEN" ] && { + . 
"${TMPFILE}_ACCESS_TOKEN" + [ "${INITIAL_ACCESS_TOKEN}" = "${ACCESS_TOKEN}" ] || { + _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "${ACCESS_TOKEN}" "${CONFIG}" + _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}" + } + } || : 1>| /dev/null + + # grab all chidren processes of access token service + # https://askubuntu.com/a/512872 + [ -n "${ACCESS_TOKEN_SERVICE_PID}" ] && { + token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)" + # first kill parent id, then children processes + kill "${ACCESS_TOKEN_SERVICE_PID}" + } || : 1>| /dev/null + + # grab all script children pids + script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)" + + # kill all grabbed children processes + # shellcheck disable=SC2086 + kill ${token_service_pids} ${script_children_pids} 1>| /dev/null + + rm -f "${TMPFILE:?}"* + + export abnormal_exit && if [ -n "${abnormal_exit}" ]; then + printf "\n\n%s\n" "Script exited manually." + kill "${_SCRIPT_KILL_SIGNAL:--9}" -$$ & + else + { _cleanup_config "${CONFIG}" && [ "${GUPLOAD_INSTALLED_WITH:-}" = script ] && _auto_update; } 1>| /dev/null & + fi + } 2>| /dev/null || : + return 0 + } + + trap 'abnormal_exit="1" ; exit' INT TERM + trap '_cleanup' EXIT + trap '' TSTP # ignore ctrl + z + + export MAIN_PID="$$" +} + ################################################### # Setup root directory where all file/folders will be uploaded/updated -# Globals: 5 variables, 6 functions -# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET -# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value, _set_value -# Arguments: None # Result: read description # If root id not found then print message and exit # Update config with root id and root id name if specified @@ -383,6 +157,7 @@ _setup_arguments() { # https://github.com/dylanaraps/pure-bash-bible#use-read-as-an-alternative-to-the-sleep-command ################################################### 
_setup_root_dir() { + export ROOTDIR ROOT_FOLDER ROOT_FOLDER_NAME QUIET ACCOUNT_NAME CONFIG UPDATE_DEFAULT_ROOTDIR _check_root_id() { _setup_root_dir_json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")" if ! rootid_setup_root_dir="$(printf "%s\n" "${_setup_root_dir_json}" | _json_value id 1 1)"; then @@ -429,13 +204,10 @@ _setup_root_dir() { # Setup Workspace folder # Check if the given folder exists in google drive. # If not then the folder is created in google drive under the configured root folder. -# Globals: 2 variables, 3 functions -# Variables - FOLDERNAME, ROOT_FOLDER -# Functions - _create_directory, _drive_info, _json_value -# Arguments: None # Result: Read Description ################################################### _setup_workspace() { + export FOLDERNAME ROOT_FOLDER ROOT_FOLDER_NAME WORKSPACE_FOLDER_ID WORKSPACE_FOLDER_NAME if [ -z "${FOLDERNAME}" ]; then WORKSPACE_FOLDER_ID="${ROOT_FOLDER}" WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}" @@ -449,22 +221,14 @@ _setup_workspace() { } ################################################### -# Process all the values in "${FINAL_LOCAL_INPUT_ARRAY[@]}" & "${FINAL_ID_INPUT_ARRAY[@]}" -# Globals: 22 variables, 17 functions -# Variables - FINAL_LOCAL_INPUT_ARRAY ( array ), ACCESS_TOKEN, VERBOSE, VERBOSE_PROGRESS -# WORKSPACE_FOLDER_ID, UPLOAD_MODE, SKIP_DUPLICATES, OVERWRITE, SHARE, -# UPLOAD_STATUS, COLUMNS, API_URL, API_VERSION, TOKEN_URL, LOG_FILE_ID -# FILE_ID, FILE_LINK, FINAL_ID_INPUT_ARRAY ( array ) -# PARALLEL_UPLOAD, QUIET, NO_OF_PARALLEL_JOBS, TMPFILE, SHARE_ROLE -# Functions - _print_center, _clear_line, _newline, _support_ansi_escapes, _print_center_quiet -# _upload_file, _share_id, _is_terminal, _dirname, -# _create_directory, _json_value, _url_encode, _check_existing_file, _bytes_to_human -# _clone_file, _get_access_token_and_update, _get_rootdir_id -# Arguments: None -# Result: Upload/Clone all the input files/folders, if a folder is empty, print Error message. 
+# Process all the values in "${ID_INPUT_ARRAY}" ################################################### _process_arguments() { - export SOURCE_UTILS + export SHARE SHARE_ROLE SHARE_EMAIL HIDE_INFO QUIET SKIP_DUPLICATES OVERWRITE \ + WORKSPACE_FOLDER_ID SOURCE_UTILS EXTRA_LOG SKIP_SUBDIRS INCLUDE_FILES EXCLUDE_FILES \ + QUIET PARALLEL_UPLOAD VERBOSE VERBOSE_PROGRESS CHECK_MODE DESCRIPTION DESCRIPTION_ALL \ + UPLOAD_MODE HIDE_INFO + # on successful uploads _share_and_print_link() { "${SHARE:-:}" "${1:-}" "${SHARE_ROLE}" "${SHARE_EMAIL}" @@ -476,11 +240,18 @@ _process_arguments() { return 0 } - unset Aseen && while read -r input <&4 && - case "${Aseen}" in - *"|:_//_:|${input}|:_//_:|"*) continue ;; - *) Aseen="${Aseen}|:_//_:|${input}|:_//_:|" ;; - esac do + _SEEN="" index_process_arguments=0 + # TOTAL_INPUTS and INPUT_FILE_* is exported in _parser_process_input function, see flags.sh + TOTAL_FILE_INPUTS="$((TOTAL_FILE_INPUTS < 0 ? 0 : TOTAL_FILE_INPUTS))" + until [ "${index_process_arguments}" -eq "${TOTAL_FILE_INPUTS}" ]; do + input="" + _set_value i input "INPUT_FILE_$((index_process_arguments += 1))" + # check if the arg was already done + case "${_SEEN}" in + *"${input}"*) continue ;; + *) _SEEN="${_SEEN}${input}" ;; + esac + # Check if the argument is a file or a directory. 
if [ -f "${input}" ]; then # export DESCRIPTION_FILE, used for descriptions in _upload_file function @@ -489,8 +260,8 @@ _process_arguments() { _print_center "justify" "Given Input" ": FILE" "=" _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n" _upload_file_main noparse "${input}" "${WORKSPACE_FOLDER_ID}" - if [ "${RETURN_STATUS}" = 1 ]; then - _share_and_print_link "${FILE_ID}" + if [ "${RETURN_STATUS:-}" = 1 ]; then + _share_and_print_link "${FILE_ID:-}" printf "\n" else for _ in 1 2; do _clear_line 1; done && continue @@ -574,6 +345,7 @@ EOF for _ in 1 2 3; do _clear_line 1; done && EMPTY=1 fi fi + export SUCCESS_STATUS ERROR_STATUS ERROR_FILES if [ "${EMPTY}" != 1 ]; then [ -z "${VERBOSE:-${VERBOSE_PROGRESS}}" ] && for _ in 1 2; do _clear_line 1; done @@ -613,15 +385,19 @@ EOF printf "\n" fi fi - done 4<< EOF -$(printf "%s\n" "${FINAL_LOCAL_INPUT_ARRAY}") -EOF + done - unset Aseen && while read -r gdrive_id <&4 && { [ -n "${gdrive_id}" ] || continue; } && - case "${Aseen}" in - *"|:_//_:|${gdrive_id}|:_//_:|"*) continue ;; - *) Aseen="${Aseen}|:_//_:|${gdrive_id}|:_//_:|" ;; - esac do + _SEEN="" index_process_arguments=0 + # TOTAL_ID_INPUTS and INPUT_ID_* is exported in _parser_process_input function, see flags.sh + TOTAL_ID_INPUTS="$((TOTAL_ID_INPUTS < 0 ? 0 : TOTAL_ID_INPUTS))" + until [ "${index_process_arguments}" -eq "${TOTAL_ID_INPUTS}" ]; do + gdrive_id="" + _set_value gdrive_id "INPUT_ID_$((index_process_arguments += 1))" + # check if the arg was already done + case "${_SEEN}" in + *"${gdrive_id}"*) continue ;; + *) _SEEN="${_SEEN}${gdrive_id}" ;; + esac _print_center "justify" "Given Input" ": ID" "=" "${EXTRA_LOG}" "justify" "Checking if id exists.." "-" [ "${CHECK_MODE}" = "md5Checksum" ] && param="md5Checksum" @@ -657,80 +433,20 @@ EOF "${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-gdrive_id})" " invalid." 
"=" 1>&2 printf "\n" fi - done 4<< EOF -$(printf "%s\n" "${FINAL_ID_INPUT_ARRAY}") -EOF + done return 0 } -main() { - [ $# = 0 ] && _short_help - - if [ -z "${SELF_SOURCE}" ]; then - export UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" - export SOURCE_UTILS='. '${UTILS_FOLDER}/auth-utils.sh' && . '${UTILS_FOLDER}/common-utils.sh' && . '${UTILS_FOLDER}/drive-utils.sh' && . '${UTILS_FOLDER}/upload-utils.sh'' - else - SCRIPT_PATH="$(cd "$(_dirname "${0}")" && pwd)/${0##*\/}" && export SCRIPT_PATH - export SOURCE_UTILS='SOURCED_GUPLOAD=true . '${SCRIPT_PATH}'' - fi - eval "${SOURCE_UTILS}" || { printf "Error: Unable to source util files.\n" && exit 1; } - - set -o errexit -o noclobber - +# this function is called from _main function for respective sh and bash scripts +_main_helper() { _setup_arguments "${@}" || exit 1 "${SKIP_INTERNET_CHECK:-_check_internet}" || exit 1 - { { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || - TMPFILE="$(pwd)/.$(_t="$(date +'%s')" && printf "%s\n" "$((_t * _t))").LOG"; } || exit 1 + TMPFILE="$(command -v mktemp 1>| /dev/null && mktemp -u)" || TMPFILE="$(pwd)/.$(_t="$(_epoch)" && printf "%s\n" "$((_t * _t))").tmpfile" export TMPFILE - _cleanup() { - # unhide the cursor if hidden - [ -n "${SUPPORT_ANSI_ESCAPES}" ] && printf "\033[?25h\033[?7h" - { - # update the config with latest ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY only if changed - [ -f "${TMPFILE}_ACCESS_TOKEN" ] && { - . 
"${TMPFILE}_ACCESS_TOKEN" - [ "${INITIAL_ACCESS_TOKEN}" = "${ACCESS_TOKEN}" ] || { - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN" "${ACCESS_TOKEN}" "${CONFIG}" - _update_config "ACCOUNT_${ACCOUNT_NAME}_ACCESS_TOKEN_EXPIRY" "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}" - } - } || : 1>| /dev/null - - # grab all chidren processes of access token service - # https://askubuntu.com/a/512872 - [ -n "${ACCESS_TOKEN_SERVICE_PID}" ] && { - token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)" - # first kill parent id, then children processes - kill "${ACCESS_TOKEN_SERVICE_PID}" - } || : 1>| /dev/null - - # grab all script children pids - script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)" - - # kill all grabbed children processes - # shellcheck disable=SC2086 - kill ${token_service_pids} ${script_children_pids} 1>| /dev/null - - rm -f "${TMPFILE:?}"* - - export abnormal_exit && if [ -n "${abnormal_exit}" ]; then - printf "\n\n%s\n" "Script exited manually." - kill -9 -$$ & - else - { _cleanup_config "${CONFIG}" && [ "${GUPLOAD_INSTALLED_WITH}" = script ] && _auto_update; } 1>| /dev/null & - fi - } 2>| /dev/null || : - return 0 - } - - trap 'abnormal_exit="1" ; exit' INT TERM - trap '_cleanup' EXIT - trap '' TSTP # ignore ctrl + z - - export MAIN_PID="$$" - - START="$(date +'%s')" + # setup a cleanup function and use it with traps, also export MAIN_PID + _setup_traps "${EXTRA_LOG}" "justify" "Checking credentials.." 
"-" { _check_credentials && _clear_line 1; } || @@ -751,14 +467,15 @@ main() { _print_center "justify" "Workspace Folder: ${WORKSPACE_FOLDER_NAME}" "=" "${HIDE_INFO:-_print_center}" "normal" " ${WORKSPACE_FOLDER_ID} " "-" && _newline "\n" + START="$(_epoch)" + # hide the cursor if ansi escapes are supported [ -n "${SUPPORT_ANSI_ESCAPES}" ] && printf "\033[?25l" _process_arguments - END="$(date +'%s')" + END="$(_epoch)" DIFF="$((END - START))" - "${QUIET:-_print_center}" 'normal' " Time Elapsed: $((DIFF / 60)) minute(s) and $((DIFF % 60)) seconds " "=" -} + "${QUIET:-_print_center}" "normal" " Time Elapsed: ""$((DIFF / 60))"" minute(s) and ""$((DIFF % 60))"" seconds. " "=" -{ [ -z "${SOURCED_GUPLOAD}" ] && main "${@}"; } || : +} diff --git a/src/common/upload-flags.sh b/src/common/upload-flags.sh new file mode 100644 index 0000000..015176f --- /dev/null +++ b/src/common/upload-flags.sh @@ -0,0 +1,595 @@ +#!/usr/bin/env sh +# shellcheck source=/dev/null + +################################################### +# setup all the flags help, stuff to be executed for them and pre process +# todo: maybe post processing too +################################################### +_parser_setup_flags() { + # add initial help text which will appear at start + _parser_add_help " +The script can be used to upload file/directory to google drive. + +Usage: +${0##*/} [options.. ] + +Foldername argument is optional. If not provided, the file will be uploaded to preconfigured google drive root folder. + +File name argument is optional if create directory option is used. + +Options:" + + ################################################### + + # not a flag exactly, but will be used to process any arguments which is not a flag + _parser_setup_flag "input" 0 + _parser_setup_flag_help \ + "Input files or drive ids to process." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset TOTAL_ID_INPUTS TOTAL_FILE_INPUTS +EOF + _parser_setup_flag_process 4<< 'EOF' +# set INPUT_FILE|ID_num to the input, where num is rank of input +case "${1}" in + *drive.google.com* | *docs.google.com*) _set_value d "INPUT_ID_$((TOTAL_ID_INPUTS += 1))" "$(_extract_id "${1}")" ;; + *) + [ -r "${1}" ] || { + { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid File - ${1} ]" "=" && printf "\n"; } 1>&2 + return + } + _set_value d "INPUT_FILE_$((TOTAL_FILE_INPUTS += 1))" "${1}" + ;; +esac +EOF + + ################################################### + + _parser_setup_flag "-a --account" 1 required "account name" + _parser_setup_flag_help \ + "Use a different account than the default one. + +To change the default account name, use this format, -a/--account default=account_name" + + _parser_setup_flag_preprocess 4<< 'EOF' +unset OAUTH_ENABLED ACCOUNT_NAME ACCOUNT_ONLY_RUN CUSTOM_ACCOUNT_NAME UPDATE_DEFAULT_ACCOUNT +EOF + + _parser_setup_flag_process 4<< 'EOF' +export OAUTH_ENABLED="true" CUSTOM_ACCOUNT_NAME="${2##default=}" +[ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ACCOUNT="_update_config" +_parser_shift +EOF + + ################################################### + + _parser_setup_flag "-la --list-accounts" 0 + _parser_setup_flag_help \ + "Print all configured accounts in the config files." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset LIST_ACCOUNTS +EOF + + _parser_setup_flag_process 4<< 'EOF' +export LIST_ACCOUNTS="true" +EOF + + ################################################### + + _parser_setup_flag "-ca --create-account" 1 required "account name" + _parser_setup_flag_help \ + "To create a new account with the given name if does not already exists." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset OAUTH_ENABLED NEW_ACCOUNT_NAME +EOF + + _parser_setup_flag_process 4<< 'EOF' +export OAUTH_ENABLED="true" +export NEW_ACCOUNT_NAME="${2}" && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-da --delete-account" 1 required "account name" + _parser_setup_flag_help \ + "To delete an account information from config file." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset DELETE_ACCOUNT_NAME +EOF + + _parser_setup_flag_process 4<< 'EOF' +export DELETE_ACCOUNT_NAME="${2}" && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-c -C --create-dir" 1 required "foldername" + _parser_setup_flag_help \ + "Option to create directory. Will print folder id. Can be used to provide input folder, see README." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset FOLDERNAME +EOF + + _parser_setup_flag_process 4<< 'EOF' +export FOLDERNAME="${2}" && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-r --root-dir" 1 required "google folder id or folder url containing id" + _parser_setup_flag_help \ + "Google folder ID/URL to which the file/directory is going to upload. +If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url" + + _parser_setup_flag_preprocess 4<< 'EOF' +unset ROOTDIR UPDATE_DEFAULT_ROOTDIR +EOF + + _parser_setup_flag_process 4<< 'EOF' +ROOTDIR="${2##default=}" +[ -z "${2##default=*}" ] && export UPDATE_DEFAULT_ROOTDIR="_update_config" +_parser_shift +EOF + + ################################################### + + _parser_setup_flag "-s --skip-subdirs" 0 + _parser_setup_flag_help \ + "Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset SKIP_SUBDIRS +EOF + + _parser_setup_flag_process 4<< 'EOF' +export SKIP_SUBDIRS="true" +EOF + + ################################################### + + _parser_setup_flag "-p --parallel" 1 required "no of files to parallely upload" + _parser_setup_flag_help \ + "Upload multiple files in parallel, Max value = 10." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset NO_OF_PARALLEL_JOBS PARALLEL_UPLOAD +EOF + + _parser_setup_flag_process 4<< 'EOF' +if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then + export NO_OF_PARALLEL_JOBS="${2}" +else + printf "\nError: -p/--parallel accepts values between 1 to 10.\n" + return 1 +fi +export PARALLEL_UPLOAD="parallel" && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-cl --clone" 1 required "gdrive id or link" + _parser_setup_flag_help \ + "Upload a gdrive file without downloading." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset TOTAL_ID_INPUTS +EOF + + _parser_setup_flag_process 4<< 'EOF' +# set INPUT_FILE|ID_num to the input, where num is rank of input +case "${1}" in + *drive.google.com* | *docs.google.com*) _set_value d "INPUT_ID_$((TOTAL_ID_INPUTS += 1))" "$(_extract_id "${1}")" ;; +esac +_parser_shift +EOF + + ################################################### + + _parser_setup_flag "-o --overwrite" 0 + _parser_setup_flag_help \ + "Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset OVERWRITE UPLOAD_MODE +EOF + + _parser_setup_flag_process 4<< 'EOF' +export OVERWRITE="Overwrite" UPLOAD_MODE="update" +EOF + + ################################################### + + _parser_setup_flag "-d --skip-duplicates" 0 + _parser_setup_flag_help \ + "Do not upload the files with the same name and size, if already present in the root folder/input folder, also works with recursive folders." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset SKIP_DUPLICATES UPLOAD_MODE +EOF + + _parser_setup_flag_process 4<< 'EOF' +export SKIP_DUPLICATES="Skip Existing" UPLOAD_MODE="update" +EOF + + ################################################### + + _parser_setup_flag "-cm --check-mode" 1 required "size or md5" + _parser_setup_flag_help \ + "Additional flag for --overwrite and --skip-duplicates flag. Can be used to change check mode in those flags, available args are 'size' and 'md5'." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset CHECK_MODE +EOF + + _parser_setup_flag_process 4<< 'EOF' +case "${2}" in + size) export CHECK_MODE="2" && _parser_shift ;; + md5) export CHECK_MODE="3" && _parser_shift ;; + *) printf "\nError: -cm/--check-mode takes size and md5 as argument.\n" ;; +esac +EOF + + ################################################### + + _parser_setup_flag "-desc --description --description-all" 1 required "description of file" + _parser_setup_flag_help \ + "Specify description for the given file. To use the respective metadata of a file, below is the format: + +File name ( fullname ): %f | Size: %s | Mime Type: %m + +Now to actually use it: --description 'Filename: %f, Size: %s, Mime: %m' + +Note: For files inside folders, use --description-all flag." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset DESCRIPTION DESCRIPTION_ALL +EOF + + _parser_setup_flag_process 4<< 'EOF' +[ "${1}" = "--description-all" ] && export DESCRIPTION_ALL="true" +export DESCRIPTION="${2}" && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-S --share" 1 required "email address" + _parser_setup_flag_help \ + "Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset SHARE EMAIL_REGEX SHARE_EMAIL +EOF + + _parser_setup_flag_process 4<< 'EOF' +SHARE="_share_id" +EMAIL_REGEX="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$" +case "${2}" in + -* | '') : ;; + *) + if _assert_regex "${EMAIL_REGEX}" "${2}"; then + SHARE_EMAIL="${2}" && _parser_shift && export SHARE_EMAIL + fi + ;; +esac +SHARE_ROLE="${SHARE_ROLE:-reader}" +EOF + + ################################################### + + _parser_setup_flag "-SM -sm --share-mode" 1 required "share mode - r/w/c" + _parser_setup_flag_help \ + "Specify the share mode for sharing file. + + Share modes are: r / reader - Read only permission. + + : w / writer - Read and write permission. + + : c / commenter - Comment only permission. + +Note: Although this flag is independent of --share flag but when email is needed, then --share flag use is neccessary." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset SHARE_ROLE SHARE +EOF + + _parser_setup_flag_process 4<< 'EOF' +case "${2}" in + r | read*) SHARE_ROLE="reader" ;; + w | write*) SHARE_ROLE="writer" ;; + c | comment*) SHARE_ROLE="commenter" ;; + *) + printf "%s\n" "Invalid share mode given ( ${2} ). Supported values are r or reader / w or writer / c or commenter." && + exit 1 + ;; +esac +SHARE="_share_id" +_parser_shift +EOF + + ################################################### + + _parser_setup_flag "--speed" 1 required "speed" + _parser_setup_flag_help \ + "Limit the download speed, supported formats: 1K, 1M and 1G." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset CURL_SPEED +EOF + + _parser_setup_flag_process 4<< 'EOF' +_tmp_regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$' +if _assert_regex "${_tmp_regex}" "${2}"; then + export CURL_SPEED="--limit-rate ${2}" && _parser_shift +else + printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2 + exit 1 +fi +EOF + + ################################################### + + _parser_setup_flag "-i --save-info" 1 required "file where to save info" + _parser_setup_flag_help \ + "Save uploaded files info to the given filename." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset LOG_FILE_ID +EOF + + _parser_setup_flag_process 4<< 'EOF' +export LOG_FILE_ID="${2}" && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-z --config" 1 required "config path" + _parser_setup_flag_help \ + "Override default config file with custom config file. +If you want to change default value, then use this format -z/--config default=default=your_config_file_path." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset UPDATE_DEFAULT_CONFIG +_check_config() { + [ -z "${1##default=*}" ] && export UPDATE_DEFAULT_CONFIG="_update_config" + { [ -r "${2}" ] && CONFIG="${2}"; } || { + printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1 + } + return 0 +} +EOF + + _parser_setup_flag_process 4<< 'EOF' +_check_config "${2}" "${2/default=/}" +_parser_shift +EOF + + ################################################### + + _parser_setup_flag "-q --quiet" 0 + _parser_setup_flag_help \ + "Supress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset QUIET +EOF + + _parser_setup_flag_process 4<< 'EOF' +export QUIET="_print_center_quiet" +EOF + + ################################################### + + _parser_setup_flag "-R --retry" 1 required "num of retries" + _parser_setup_flag_help \ + "Retry the file upload if it fails, postive integer as argument. Currently only for file uploads." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset RETRY +EOF + + _parser_setup_flag_process 4<< 'EOF' +if [ "$((2))" -gt 0 ] 2>| /dev/null 1>&2; then + export RETRY="${2}" && _parser_shift +else + printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n" + exit 1 +fi +EOF + + ################################################### + + _parser_setup_flag "-in --include" 1 required "pattern" + _parser_setup_flag_help \ + "Only include the files with the given pattern to upload - Applicable for folder uploads. + +e.g: ${0##*/} local_folder --include '*1*', will only include with files with pattern '1' in the name." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset INCLUDE_FILES +EOF + + _parser_setup_flag_process 4<< 'EOF' +INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "-ex --exclude" 1 required "pattern" + _parser_setup_flag_help \ + "Exclude the files with the given pattern from uploading. - Applicable for folder uploads. + +e.g: ${0##*/} local_folder --exclude "*1*", will exclude all the files pattern '1' in the name." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset EXCLUDE_FILES +EOF + + _parser_setup_flag_process 4<< 'EOF' +EXCLUDE_FILES="${EXCLUDE_FILES} -name ! '${2}' " && _parser_shift +EOF + + ################################################### + + _parser_setup_flag "--hide" 0 + _parser_setup_flag_help \ + "This flag will prevent the script to print sensitive information like root folder id and drivelink." 
+ + _parser_setup_flag_preprocess 4<< 'EOF' +unset HIDE_INFO +EOF + + _parser_setup_flag_process 4<< 'EOF' +HIDE_INFO=":" +EOF + + ################################################### + + _parser_setup_flag "-v --verbose" 0 + _parser_setup_flag_help \ + "Display detailed message (only for non-parallel uploads)." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset VERBOSE +EOF + + _parser_setup_flag_process 4<< 'EOF' +export VERBOSE="true" +EOF + + ################################################### + + _parser_setup_flag "-V --verbose-progress" 0 + _parser_setup_flag_help \ + "Display detailed message and detailed upload progress (only for non-parallel uploads)." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset VERBOSE_PROGRESS +EOF + + _parser_setup_flag_process 4<< 'EOF' +export VERBOSE_PROGRESS="true" +EOF + + ################################################### + + _parser_setup_flag "--skip-internet-check" 0 + _parser_setup_flag_help \ + "Do not check for internet connection, recommended to use in sync jobs." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset SKIP_INTERNET_CHECK +EOF + + _parser_setup_flag_process 4<< 'EOF' +export SKIP_INTERNET_CHECK=":" +EOF + + ################################################### + + _parser_setup_flag "--version --info" 0 + _parser_setup_flag_help \ + "Show detailed info, only if script is installed system wide."
+ + _parser_setup_flag_preprocess 4<< 'EOF' +################################################### +# Print info if installed +################################################### +_version_info() { + export COMMAND_NAME REPO INSTALL_PATH TYPE TYPE_VALUE + if command -v "${COMMAND_NAME}" 1> /dev/null && [ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]; then + for i in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do + value_version_info="" + _set_value i value_version_info "${i}" + printf "%s\n" "${i}=${value_version_info}" + done | sed -e "s/=/: /g" + else + printf "%s\n" "google-drive-upload is not installed system wide." + fi + exit 0 +} +EOF + + _parser_setup_flag_process 4<< 'EOF' +_version_info +EOF + + ################################################### + + _parser_setup_flag "-D --debug" 0 + _parser_setup_flag_help \ + "Display script command trace." + + _parser_setup_flag_preprocess 4<< 'EOF' +unset DEBUG +EOF + + _parser_setup_flag_process 4<< 'EOF' +export DEBUG="true" +EOF + + ################################################### + + _parser_setup_flag "-h --help" 1 optional "flag name" + _parser_setup_flag_help \ + "Print help for all flags and basic usage instructions. 
+ +To see help for a specific flag, --help flag_name ( with or without dashes ) + e.g: ${0##*/} --help aria" + + _parser_setup_flag_preprocess 4<< 'EOF' +################################################### +# 1st arg - can be flag name +# if 1st arg given, print specific flag help +# otherwise print full help +################################################### +_usage() { + [ -n "${1}" ] && { + help_usage_usage="" + _flag_help "${1}" help_usage_usage + + if [ -z "${help_usage_usage}" ]; then + printf "%s\n" "Error: No help found for ${1}" + else + printf "%s\n%s\n%s\n" "${__PARSER_BAR}" "${help_usage_usage}" "${__PARSER_BAR}" + fi + exit 0 + } + + printf "%s\n" "${_PARSER_ALL_HELP}" + exit 0 +} +EOF + + _parser_setup_flag_process 4<< 'EOF' +_usage "${2}" +EOF + ################################################### + + # should be only available if installed using install script + [ "${GUPLOAD_INSTALLED_WITH:-}" = script ] && { + _parser_setup_flag "-u --update" 0 + _parser_setup_flag_help \ + "Update the installed script in your system." + + _parser_setup_flag_process 4<< 'EOF' +_check_debug && _update && { exit 0 || exit 1; } +EOF + + ######################### + + _parser_setup_flag "--uninstall" 0 + _parser_setup_flag_help \ + "Uninstall script, remove related files." + + _parser_setup_flag_process 4<< 'EOF' +_check_debug && _update uninstall && { exit 0 || exit 1; } +EOF + } + + ################################################### + return 0 +} diff --git a/sh/upload-utils.sh b/src/common/upload-utils.sh old mode 100755 new mode 100644 similarity index 93% rename from sh/upload-utils.sh rename to src/common/upload-utils.sh index 583d4bf..b49fd1c --- a/sh/upload-utils.sh +++ b/src/common/upload-utils.sh @@ -5,19 +5,16 @@ # A simple wrapper to check tempfile for access token and make authorized oauth requests to drive api ################################################### _api_request() { - . "${TMPFILE}_ACCESS_TOKEN" + . 
"${TMPFILE:-}_ACCESS_TOKEN" curl --compressed \ - -H "Authorization: Bearer ${ACCESS_TOKEN}" \ + -H "Authorization: Bearer ${ACCESS_TOKEN:-}" \ "${@}" } ################################################### # Used in collecting file properties from output json after a file has been uploaded/cloned # Also handles logging in log file if LOG_FILE_ID is set -# Globals: 1 variables, 2 functions -# Variables - LOG_FILE_ID -# Functions - _error_logging_upload, _json_value # Arguments: 1 # ${1} = output jsom # Result: set fileid and link, save info to log file if required @@ -56,9 +53,6 @@ _error_logging_upload() { ################################################### # A small function to get rootdir id for files in sub folder uploads -# Globals: 1 variable, 1 function -# Variables - DIRIDS -# Functions - _dirname # Arguments: 1 # ${1} = filename # Result: read discription @@ -108,9 +102,6 @@ _upload_file_main() { ################################################### # Upload all files in the given folder, parallelly or non-parallely and show progress -# Globals: 7 variables, 3 functions -# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET -# Functions - _clear_line, _newline, _print_center and _upload_file_main # Arguments: 4 # ${1} = parallel or normal # ${2} = parse or norparse @@ -119,8 +110,9 @@ _upload_file_main() { # Result: read discription, set SUCCESS_STATUS & ERROR_STATUS ################################################### _upload_folder() { + export VERBOSE VERBOSE_PROGRESS NO_OF_PARALLEL_JOBS TMPFILE NO_OF_FILES [ $# -lt 3 ] && printf "Missing arguments\n" && return 1 - mode_upload_folder="${1}" PARSE_MODE="${2}" files_upload_folder="${3}" ID="${4:-}" && export PARSE_MODE ID + mode_upload_folder="${1}" PARSE_MODE="${2}" files_upload_folder="${3}" ID="${4:-}" SUCCESS_STATUS=0 SUCCESS_FILES="" ERROR_STATUS=0 ERROR_FILES="" case "${mode_upload_folder}" in normal) @@ -145,6 +137,7 @@ EOF [ -f "${TMPFILE}"SUCCESS ] 
&& rm "${TMPFILE}"SUCCESS [ -f "${TMPFILE}"ERROR ] && rm "${TMPFILE}"ERROR + export PARSE_MODE ID # shellcheck disable=SC2016 (printf "%s\n" "${files_upload_folder}" | xargs -P"${NO_OF_PARALLEL_JOBS_FINAL}" -I "{}" -n 1 sh -c ' eval "${SOURCE_UTILS}" @@ -168,6 +161,7 @@ EOF ERROR_STATUS="$(($(wc -l < "${TMPFILE}"ERROR)))" ERROR_FILES="$(cat "${TMPFILE}"ERROR)" export SUCCESS_FILES ERROR_FILES ;; + *) : ;; esac return 0 } diff --git a/sh/google-oauth2.sh b/src/google-oauth2.sh old mode 100755 new mode 100644 similarity index 88% rename from sh/google-oauth2.sh rename to src/google-oauth2.sh index 59bbca1..ca37696 --- a/sh/google-oauth2.sh +++ b/src/google-oauth2.sh @@ -1,7 +1,7 @@ #!/usr/bin/env sh # shellcheck source=/dev/null -set -o errexit -o noclobber +set -o noclobber _usage() { printf "%s\n" " @@ -13,7 +13,7 @@ Usage: ./${0##*/} add - authenticates a new user but will use the client id and secret if available. If not, then same as create flag. - ./${0##*/} refresh - gets a new access token. Make sure CLIENT_SECRET, CLIENT_ID and REFRESH_TOKEN is exported as an environment variable or CONFIG + ./${0##*/} refresh - gets a new access token. Make sure CLIENT_SECRET, CLIENT_ID and REFRESH_TOKEN is exported as an environment variable or CONFIG ./${0##*/} help - show this help. @@ -26,7 +26,7 @@ You can also export CLIENT_SECRET, CLIENT_ID and REFRESH_TOKEN as an environment } UTILS_FOLDER="${UTILS_FOLDER:-$(pwd)}" -{ . "${UTILS_FOLDER}"/common-utils.bash && . "${UTILS_FOLDER}"/auth-utils.bash; } || { printf "Error: Unable to source util files.\n" && exit 1; } +{ . "${UTILS_FOLDER}"/sh/common-utils.sh && . "${UTILS_FOLDER}"/common/common-utils.sh && . "${UTILS_FOLDER}"/common/auth-utils.sh; } || { printf "Error: Unable to source util files.\n" && exit 1; } [ $# = 0 ] && _usage @@ -78,9 +78,10 @@ case "${1}" in [ -z "${CLIENT_SECRET}" ] && printf "%s\n" "Missing CLIENT_SECRET variable, make sure to export to use refresh option." 
&& _usage [ -z "${REFRESH_TOKEN}" ] && printf "%s\n" "Missing REFRESH_TOKEN variable, make sure to export to use refresh option." && _usage ;; + *) ;; esac -_check_account_credentials || exit 1 +_check_account_credentials "${DEFAULT_ACCOUNT:-dummy}" || exit 1 [ -n "${CREATE_ACCOUNT}" ] && printf "Refresh Token: %s\n\n" "${REFRESH_TOKEN}" 1>&2 -printf "Access Token: %s\n" "${ACCESS_TOKEN}" 1>&2 +printf "Access Token: %s\n" "${ACCESS_TOKEN:-}" 1>&2 exit 0 diff --git a/gsync.service b/src/gsync.service similarity index 100% rename from gsync.service rename to src/gsync.service diff --git a/src/sh/.editorconfig b/src/sh/.editorconfig new file mode 100644 index 0000000..f1c458c --- /dev/null +++ b/src/sh/.editorconfig @@ -0,0 +1,19 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +# for shfmt +[*.sh] +indent_style = space +indent_size = 4 +shell_variant = posix +switch_case_indent = true +space_redirects = true \ No newline at end of file diff --git a/src/sh/common-utils.sh b/src/sh/common-utils.sh new file mode 100644 index 0000000..775cc78 --- /dev/null +++ b/src/sh/common-utils.sh @@ -0,0 +1,75 @@ +#!/usr/bin/env sh +# Functions that will used in core script +# posix functions + +################################################### +# Check if something contains some +# Arguments: +# ${1} = pattern to match, can be regex +# ${2} = string where it should match the pattern +# Result: return 0 or 1 +################################################### +_assert_regex() { + grep -qE "${1:?Error: Missing pattern}" 0<< EOF +${2:?Missing string} +EOF +} + +################################################### +# count number of lines using wc +################################################### +_count() { + wc -l +} + 
+################################################### +# Print epoch seconds +################################################### +_epoch() { + date +'%s' +} + +################################################### +# fetch column size and check if greater than the num ( see in function) +# return 1 or 0 +################################################### +_required_column_size() { + COLUMNS="$({ command -v bash 1>| /dev/null && bash -c 'shopt -s checkwinsize && (: && :); printf "%s\n" "${COLUMNS}" 2>&1'; } || + { command -v zsh 1>| /dev/null && zsh -c 'printf "%s\n" "${COLUMNS}"'; } || + { command -v stty 1>| /dev/null && _tmp="$(stty size)" && printf "%s\n" "${_tmp##* }"; } || + { command -v tput 1>| /dev/null && tput cols; })" || : + + [ "$((COLUMNS))" -gt 45 ] && return 0 +} + +################################################### +# Evaluates value1=value2 +# Arguments: 3 +# ${1} = direct ( d ) or indirect ( i ) - ( evaluation mode ) +# ${2} = var name +# ${3} = var value +# Result: export value1=value2 +################################################### +_set_value() { + case "${1:?}" in + d | direct) export "${2:?}=${3}" ;; + i | indirect) eval export "${2}"=\"\$"${3}"\" ;; + *) return 1 ;; + esac +} + +################################################### +# Encode the given string to parse properly in network requests +# Arguments: 1 +# ${1} = string +# Result: print encoded string +# Reference: +# https://stackoverflow.com/a/41405682 +################################################### +_url_encode() ( + LC_ALL=C LANG=C + awk 'BEGIN {while (y++ < 125) z[sprintf("%c", y)] = y + while (y = substr(ARGV[1], ++j, 1)) + q = y ~ /[[:alnum:]]_.!~*\47()-]/ ? 
q y : q sprintf("%%%02X", z[y]) + print q}' "${1}" +) diff --git a/bash/sync.bash b/src/sync.bash old mode 100755 new mode 100644 similarity index 91% rename from bash/sync.bash rename to src/sync.bash index 62fdd58..5917149 --- a/bash/sync.bash +++ b/src/sync.bash @@ -42,7 +42,6 @@ _short_help() { ################################################### # Check if a pid exists by using ps -# Globals: None # Arguments: 1 # ${1} = pid number of a sync job # Result: return 0 or 1 @@ -53,9 +52,6 @@ _check_pid() { ################################################### # Show information about a specific sync job -# Globals: 1 variable, 2 functions -# Variable - SYNC_LIST -# Functions - _check_pid, _setup_loop_variables # Arguments: 1 # ${1} = pid number of a sync job # ${2} = anything: Prints extra information ( optional ) @@ -95,11 +91,8 @@ _get_job_info() { ################################################### # Remove a sync job information from database -# Globals: 2 variables -# SYNC_LIST, SYNC_DETAIL_DIR # Arguments: 1 # ${1} = pid number of a sync job -# Result: read description ################################################### _remove_job() { declare pid="${1}" input local_folder drive_folder new_list @@ -120,11 +113,8 @@ _remove_job() { ################################################### # Kill a sync job and do _remove_job -# Globals: 1 function -# _remove_job # Arguments: 1 # ${1} = pid number of a sync job -# Result: read description ################################################### _kill_job() { declare pid="${1}" @@ -135,12 +125,8 @@ _kill_job() { ################################################### # Show total no of sync jobs running -# Globals: 1 variable, 2 functions -# Variable - SYNC_LIST -# Functions - _get_job_info, _remove_job # Arguments: 1 # ${1} = v/verbose: Prints extra information ( optional ) -# Result: read description ################################################### _show_jobs() { declare list pid total=0 @@ -162,11 +148,8 @@ _show_jobs() { 
################################################### # Setup required variables for a sync job -# Globals: 1 Variable -# SYNC_DETAIL_DIR # Arguments: 1 # ${1} = Local folder name which will be synced -# Result: read description ################################################### _setup_loop_variables() { declare folder="${1}" drive_folder="${2}" @@ -179,10 +162,6 @@ _setup_loop_variables() { ################################################### # Create folder and files for a sync job -# Globals: 4 variables -# DIRECTORY, PID_FILE, SUCCESS_LOG, ERROR_LOG -# Arguments: None -# Result: read description ################################################### _setup_loop_files() { mkdir -p "${DIRECTORY}" @@ -195,10 +174,6 @@ _setup_loop_files() { ################################################### # Check for new files in the sync folder and upload it # A list is generated everytime, success and error. -# Globals: 4 variables -# SUCCESS_LOG, ERROR_LOG, COMMAND_NAME, ARGS, GDRIVE_FOLDER -# Arguments: None -# Result: read description ################################################### _check_and_upload() { declare all initial new_files new_file @@ -232,11 +207,6 @@ _check_and_upload() { ################################################### # Loop _check_and_upload function, sleep for sometime in between -# Globals: 1 variable, 1 function -# Variable - SYNC_TIME_TO_SLEEP -# Function - _check_and_upload -# Arguments: None -# Result: read description ################################################### _loop() { while :; do @@ -247,10 +217,6 @@ _loop() { ################################################### # Check if a loop exists with given input -# Globals: 3 variables, 3 function -# Variable - FOLDER, PID, GDRIVE_FOLDER -# Function - _setup_loop_variables, _setup_loop_files, _check_pid -# Arguments: None # Result: return 0 - No existing loop, 1 - loop exists, 2 - loop only in database # if return 2 - then remove entry from database 
################################################### @@ -273,12 +239,7 @@ _check_existing_loop() { ################################################### # Start a new sync job by _loop function # Print sync job information -# Globals: 7 variables, 1 function -# Variable - LOGS, PID_FILE, INPUT, GDRIVE_FOLDER, FOLDER, SYNC_LIST, FOREGROUND -# Function - _loop -# Arguments: None -# Result: read description -# Show logs at last and don't hangup if SHOW_LOGS is set +# Result: Show logs at last and don't hangup if SHOW_LOGS is set ################################################### _start_new_loop() { if [[ -n ${FOREGROUND} ]]; then @@ -301,11 +262,6 @@ _start_new_loop() { ################################################### # Triggers in case either -j & -k or -l flag ( both -k|-j if with positive integer as argument ) # Priority: -j > -i > -l > -k -# Globals: 5 variables, 6 functions -# Variables - JOB, SHOW_JOBS_VERBOSE, INFO_PID, LOG_PID, KILL_PID ( all array ) -# Functions - _check_pid, _setup_loop_variables -# _kill_job, _show_jobs, _get_job_info, _remove_job -# Arguments: None # Result: show either job info, individual info or kill job(s) according to set global variables. 
# Script exits after -j and -k if kill all is triggered ) ################################################### @@ -366,9 +322,6 @@ _do_job() { ################################################### # Process all arguments given to the script -# Globals: 1 variable, 3 functions -# Variable - HOME -# Functions - _kill_jobs, _show_jobs, _get_job_info # Arguments: Many # ${@} = Flags with arguments # Result: On @@ -483,11 +436,6 @@ _setup_arguments() { ################################################### # Grab config variables and modify defaults if necessary -# Globals: 5 variables, 2 functions -# Variables - INFO_PATH, UPDATE_DEFAULT_CONFIG, DEFAULT_ARGS -# UPDATE_DEFAULT_ARGS, UPDATE_DEFAULT_TIME_TO_SLEEP, TIME_TO_SLEEP -# Functions - _print_center, _update_config -# Arguments: None # Result: grab COMMAND_NAME, INSTALL_PATH, and CONFIG # source CONFIG, update default values if required ################################################### @@ -520,9 +468,6 @@ _config_variables() { ################################################### # Print systemd service file contents -# Globals: 5 variables -# Variables - LOGNAME, INSTALL_PATH, COMMAND_NAME, SYNC_COMMAND_NAME, ALL_ARGUMNETS -# Arguments: None ################################################### _systemd_service_contents() { declare username="${LOGNAME:?Give username}" install_path="${INSTALL_PATH:?Missing install path}" \ @@ -561,7 +506,6 @@ WantedBy=multi-user.target ################################################### # Create systemd service wrapper script for managing the service -# Globals: None # Arguments: 3 # ${1} = Service name # ${1} = Service file contents @@ -688,11 +632,6 @@ done' ################################################### # Process all the values in "${FINAL_INPUT_ARRAY[@]}" -# Globals: 20 variables, 15 functions -# Variables - FINAL_INPUT_ARRAY ( array ), DEFAULT_ACCOUNT, ROOT_FOLDER_NAME, GDRIVE_FOLDER -# PID_FILE, SHOW_LOGS, LOGS, KILL, INFO, CREATE_SERVICE, ARGS, SERVICE_NAME -# Functions - 
_set_value, _systemd_service_script, _systemd_service_contents, _print_center, _check_existing_loop, _start_new_loop -# Arguments: None # Result: Start the sync jobs for given folders, if running already, don't start new. # If a pid is detected but not running, remove that job. # If service script is going to be created then don,t touch the jobs @@ -745,11 +684,11 @@ _process_arguments() { main() { [[ $# = 0 ]] && _short_help - set -o errexit -o noclobber -o pipefail + set -o noclobber -o pipefail [[ -z ${SELF_SOURCE} ]] && { UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" - { . "${UTILS_FOLDER}"/common-utils.bash; } || { printf "Error: Unable to source util files.\n" && exit 1; } + { . "${UTILS_FOLDER}"/bash/common-utils.bash && . "${UTILS_FOLDER}"/common/common-utils.sh; } || { printf "Error: Unable to source util files.\n" && exit 1; } } trap '' TSTP # ignore ctrl + z diff --git a/sh/sync.sh b/src/sync.sh old mode 100755 new mode 100644 similarity index 92% rename from sh/sync.sh rename to src/sync.sh index 7fc6a23..7fb5355 --- a/sh/sync.sh +++ b/src/sync.sh @@ -42,7 +42,6 @@ _short_help() { ################################################### # Check if a pid exists by using ps -# Globals: None # Arguments: 1 # ${1}" = pid number of a sync job # Result: return 0 or 1 @@ -53,14 +52,10 @@ _check_pid() { ################################################### # Show information about a specific sync job -# Globals: 1 variable, 1 function -# Variable - SYNC_LIST -# Functions - _setup_loop_variables # Arguments: 1 # ${1}" = pid number of a sync job # ${2}" = anything: Prints extra information ( optional ) # ${3}" = all information about a job ( optional ) -# Result: read description ################################################### _get_job_info() { unset local_folder_get_job_info times_get_job_info extra_get_job_info @@ -95,11 +90,8 @@ _get_job_info() { ################################################### # Remove a sync job information from database -# Globals: 2 variables -# 
SYNC_LIST, SYNC_DETAIL_DIR # Arguments: 1 # ${1} = pid number of a sync job -# Result: read description ################################################### _remove_job() { unset input_remove_job local_folder_remove_job drive_folder_remove_job new_list_remove_job @@ -121,11 +113,8 @@ _remove_job() { ################################################### # Kill a sync job and do _remove_job -# Globals: 1 function -# _remove_job # Arguments: 1 # ${1}" = pid number of a sync job -# Result: read description ################################################### _kill_job() { pid_kill_job="${1}" @@ -136,12 +125,8 @@ _kill_job() { ################################################### # Show total no of sync jobs running -# Globals: 1 variable, 2 functions -# Variable - SYNC_LIST -# Functions - _get_job_info, _remove_job # Arguments: 1 # ${1}" = v/verbose: Prints extra information ( optional ) -# Result: read description ################################################### _show_jobs() { unset list_show_job pid_show_job no_task_show_job @@ -152,7 +137,11 @@ _show_jobs() { if [ -n "${line}" ]; then _tmp="${line%%"|:_//_:|"*}" && pid_show_job="${_tmp##*: }" _get_job_info "${pid_show_job}" "${1}" "${line}" - { [ "${RETURN_STATUS}" = 1 ] && _remove_job "${pid_show_job}"; } || { total_show_job="$((total_show_job + 1))" && no_task_show_job="printf"; } + if [ "${RETURN_STATUS}" = 1 ]; then + _remove_job "${pid_show_job}" + else + total_show_job="$((total_show_job + 1))" && no_task_show_job="printf" + fi fi done 4< "${SYNC_LIST}" @@ -163,11 +152,8 @@ _show_jobs() { ################################################### # Setup required variables for a sync job -# Globals: 1 Variable -# SYNC_DETAIL_DIR # Arguments: 1 # ${1}" = Local folder name which will be synced -# Result: read description ################################################### _setup_loop_variables() { folder_setup_loop_variables="${1}" drive_folder_setup_loop_variables="${2}" @@ -180,10 +166,6 @@ _setup_loop_variables() { 
################################################### # Create folder and files for a sync job -# Globals: 4 variables -# DIRECTORY, PID_FILE, SUCCESS_LOG, ERROR_LOG -# Arguments: None -# Result: read description ################################################### _setup_loop_files() { mkdir -p "${DIRECTORY}" @@ -196,11 +178,6 @@ _setup_loop_files() { ################################################### # Check for new files in the sync folder and upload it # A list is generated everytime, success and error. -# Globals: 4 variables, 1 function -# Variables - SUCCESS_LOG, ERROR_LOG, COMMAND_NAME, ARGS, GDRIVE_FOLDER -# Function - _remove_array_duplicates -# Arguments: None -# Result: read description ################################################### _check_and_upload() { unset all_check_and_upload initial_check_and_upload new_files_check_and_upload new_file_check_and_upload aseen_check_and_upload @@ -249,11 +226,6 @@ EOF ################################################### # Loop _check_and_upload function, sleep for sometime in between -# Globals: 1 variable, 1 function -# Variable - SYNC_TIME_TO_SLEEP -# Function - _check_and_upload -# Arguments: None -# Result: read description ################################################### _loop() { while :; do @@ -264,10 +236,6 @@ _loop() { ################################################### # Check if a loop exists with given input -# Globals: 3 variables, 3 function -# Variable - FOLDER, PID, GDRIVE_FOLDER -# Function - _setup_loop_variables, _setup_loop_files, _check_pid -# Arguments: None # Result: return 0 - No existing loop, 1 - loop exists, 2 - loop only in database # if return 2 - then remove entry from database ################################################### @@ -290,12 +258,7 @@ _check_existing_loop() { ################################################### # Start a new sync job by _loop function # Print sync job information -# Globals: 7 variables, 1 function -# Variable - LOGS, PID_FILE, INPUT, GDRIVE_FOLDER, 
FOLDER, SYNC_LIST, FOREGROUND -# Function - _loop -# Arguments: None -# Result: read description -# Show logs at last and don't hangup if SHOW_LOGS is set +# Show logs at last and don't hangup if SHOW_LOGS is set ################################################### _start_new_loop() { if [ -n "${FOREGROUND}" ]; then @@ -317,11 +280,6 @@ _start_new_loop() { ################################################### # Triggers in case either -j & -k or -l flag ( both -k|-j if with positive integer as argument ) # Priority: -j > -i > -l > -k -# Globals: 5 variables, 6 functions -# Variables - JOB, SHOW_JOBS_VERBOSE, INFO_PID, LOG_PID, KILL_PID ( all array ) -# Functions - _check_pid, _setup_loop_variables -# _kill_job, _show_jobs, _get_job_info, _remove_job -# Arguments: None # Result: show either job info, individual info or kill job(s) according to set global variables. # Script exits after -j and -k if kill all is triggered ) ################################################### @@ -357,6 +315,7 @@ _do_job() { printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2 } ;; + *) ;; esac case "${JOB_TYPE}" in *SHOW_LOGS*) @@ -371,6 +330,7 @@ _do_job() { printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2 fi ;; + *) ;; esac case "${JOB_TYPE}" in *KILL*) @@ -382,23 +342,23 @@ _do_job() { printf "No job running with given PID ( %s ).\n" "${pid}" 1>&2 fi ;; + *) ;; esac done 4<< EOF $(printf "%s\n" "${ALL_PIDS}") EOF case "${JOB_TYPE}" in *INFO* | *SHOW_LOGS* | *KILL*) exit 0 ;; + *) ;; esac ;; + *) ;; esac return 0 } ################################################### # Process all arguments given to the script -# Globals: 1 variable, 4 functions -# Variable - HOME -# Functions - _kill_jobs, _show_jobs, _get_job_info, _remove_array_duplicates # Arguments: Many # ${@} = Flags with arguments # Result: On @@ -429,6 +389,7 @@ _setup_arguments() { -j | --jobs) case "${2}" in v*) SHOW_JOBS_VERBOSE="true" && shift ;; + *) ;; esac JOB="SHOW_JOBS" ;; @@ -454,6 +415,7 @@ 
_setup_arguments() { if [ "${2}" -gt 0 ] 2>| /dev/null 1>&2; then case "${2}" in default*) UPDATE_DEFAULT_TIME_TO_SLEEP="_update_config" ;; + *) ;; esac TO_SLEEP="${2##default=/}" && shift else @@ -465,6 +427,7 @@ _setup_arguments() { _check_longoptions "${1}" "${2}" case "${2}" in default*) UPDATE_DEFAULT_ARGS="_update_config" ;; + *) ;; esac ARGS=" ${ARGS} ${2##default=} " && shift ;; @@ -521,11 +484,6 @@ _setup_arguments() { ################################################### # Grab config variables and modify defaults if necessary -# Globals: 5 variables, 2 functions -# Variables - INFO_PATH, UPDATE_DEFAULT_CONFIG, DEFAULT_ARGS -# UPDATE_DEFAULT_ARGS, UPDATE_DEFAULT_TIME_TO_SLEEP, TIME_TO_SLEEP -# Functions - _print_center, _update_config -# Arguments: None # Result: grab COMMAND_NAME, INSTALL_PATH, and CONFIG # source CONFIG, update default values if required ################################################### @@ -558,7 +516,6 @@ _config_variables() { ################################################### # Print systemd service file contents -# Globals: 5 variables # Variables - LOGNAME, INSTALL_PATH, COMMAND_NAME, SYNC_COMMAND_NAME, ALL_ARGUMNETS # Arguments: None ################################################### @@ -599,7 +556,6 @@ WantedBy=multi-user.target ################################################### # Create systemd service wrapper script for managing the service -# Globals: None # Arguments: 3 # ${1} = Service name # ${1} = Service file contents @@ -725,11 +681,7 @@ done' ################################################### # Process all the values in "${FINAL_INPUT_ARRAY[@]}" -# Globals: 20 variables, 15 functions -# Variables - FINAL_INPUT_ARRAY ( array ), DEFAULT_ACCOUNT, ROOT_FOLDER_NAME, GDRIVE_FOLDER -# PID_FILE, SHOW_LOGS, LOGS, KILL, INFO, CREATE_SERVICE, ARGS, SERVICE_NAME # Functions - _set_value, _systemd_service_script, _systemd_service_contents, _print_center, _check_existing_loop, _start_new_loop -# Arguments: None # Result: Start the 
sync jobs for given folders, if running already, don't start new. # If a pid is detected but not running, remove that job. # If service script is going to be created then don,t touch the jobs @@ -776,6 +728,7 @@ _process_arguments() { [ -n "${KILL}" ] && _kill_job "${PID}" && exit [ -n "${SHOW_LOGS}" ] && tail -f "${LOGS}" ;; + *) ;; esac cd "${current_folder_process_arguments}" || exit 1 done 4<< EOF @@ -787,11 +740,11 @@ EOF main() { [ $# = 0 ] && _short_help - set -o errexit -o noclobber + set -o noclobber if [ -z "${SELF_SOURCE}" ]; then - UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" && SOURCE_UTILS=". '${UTILS_FOLDER}/common-utils.sh'" - eval "${SOURCE_UTILS}" || { printf "Error: Unable to source util files.\n" && exit 1; } + UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" + { . "${UTILS_FOLDER}"/sh/common-utils.sh && . "${UTILS_FOLDER}"/common/common-utils.sh; } || { printf "Error: Unable to source util files.\n" && exit 1; } fi trap '' TSTP # ignore ctrl + z diff --git a/src/upload.bash b/src/upload.bash new file mode 100644 index 0000000..4bb613f --- /dev/null +++ b/src/upload.bash @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +# Upload a file to Google Drive +# shellcheck source=/dev/null + +main() { + [[ $# = 0 ]] && { + printf "No valid arguments provided, use -h/--help flag to see usage.\n" + exit 0 + } + + [[ -z ${SELF_SOURCE} ]] && { + # this is to export the functions so that can used in parallel functions + set -a + export UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" + export COMMON_PATH="${UTILS_FOLDER}/common" + { . "${UTILS_FOLDER}/bash/common-utils.bash" && + . "${COMMON_PATH}/parser.sh" && + . "${COMMON_PATH}/upload-flags.sh" && + . "${COMMON_PATH}/auth-utils.sh" && + . "${COMMON_PATH}/common-utils.sh" && + . "${COMMON_PATH}/drive-utils.sh" && + . "${COMMON_PATH}/upload-utils.sh" && + . 
"${COMMON_PATH}/upload-common.sh"; } || + { printf "Error: Unable to source util files.\n" && exit 1; } + set +a + } + # this var is used for posix scripts in download folder function inside xargs, but we don't need that here + export SOURCE_UTILS="" + + [[ ${BASH_VERSINFO:-0} -ge 4 ]] || { printf "Bash version lower than 4.x not supported.\n" && return 1; } + set -o noclobber -o pipefail || exit 1 + + # the kill signal which is used to kill the whole script and children in case of ctrl + c + export _SCRIPT_KILL_SIGNAL="--" + + # execute the main helper function which does the rest of stuff + _main_helper "${@}" || exit 1 +} + +{ [[ -z ${SOURCED_GUPLOAD} ]] && main "${@}"; } || : diff --git a/src/upload.sh b/src/upload.sh new file mode 100644 index 0000000..92f6037 --- /dev/null +++ b/src/upload.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env sh +# Upload a file to Google Drive +# shellcheck source=/dev/null + +main() { + [ $# = 0 ] && { + printf "No valid arguments provided, use -h/--help flag to see usage.\n" + exit 0 + } + + export _SHELL="sh" + if [ -z "${SELF_SOURCE}" ]; then + export UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}" + export COMMON_PATH="${UTILS_FOLDER}/common" + # shellcheck disable=SC2089 + export SOURCE_UTILS=". '${UTILS_FOLDER}/sh/common-utils.sh' && + . '${COMMON_PATH}/parser.sh' && + . '${COMMON_PATH}/flags.sh' && + . '${COMMON_PATH}/auth-utils.sh' && + . '${COMMON_PATH}/common-utils.sh' && + . '${COMMON_PATH}/drive-utils.sh' && + . '${COMMON_PATH}/upload-utils.sh' + . '${COMMON_PATH}/upload-common.sh'" + else + SCRIPT_PATH="$(cd "$(_dirname "${0}")" && pwd)/${0##*\/}" && export SCRIPT_PATH + # shellcheck disable=SC2090 + export SOURCE_UTILS="SOURCED_GUPLOAD=true . 
'${SCRIPT_PATH}'" + fi + eval "${SOURCE_UTILS}" || { printf "Error: Unable to source util files.\n" && exit 1; } + + set -o noclobber + + # the kill signal which is used to kill the whole script and children in case of ctrl + c + export _SCRIPT_KILL_SIGNAL="-9" + + # execute the main helper function which does the rest of stuff + _main_helper "${@}" || exit 1 +} + +{ [ -z "${SOURCED_GUPLOAD}" ] && main "${@}"; } || :