diff --git a/README.md b/README.md
index 0c49e43..f780379 100755
--- a/README.md
+++ b/README.md
@@ -114,6 +114,7 @@ This repo contains two types of scripts, posix compatible and bash compatible.
| sed | Miscellaneous |
| mktemp | To generate temporary files ( optional ) |
| sleep | Self explanatory |
+| ps | To manage background processes |
If BASH is not available or BASH is available but version is less than 4.x, then below programs are also required:
@@ -124,11 +125,10 @@ This repo contains two types of scripts, posix compatible and bash compatible.
| cat | Miscellaneous |
| stty or zsh or tput | To determine column size ( optional ) |
-These programs are needed for synchronisation script:
+These are the additional programs needed for the synchronisation script:
| Program | Role In Script |
| ------------- | ------------------------- |
-| ps | To manage background jobs |
| tail | To show indefinite logs |
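For reference, `ps` is now needed by the upload scripts themselves and not only by the synchronisation script: it is used to stop the background access-token refresh service on exit. A rough sketch of that usage, mirroring the `_cleanup` change further down in this diff ( GNU procps syntax; `ACCESS_TOKEN_SERVICE_PID` is set by the scripts ):

```shell
# list the direct children of the token service ( -o pid= prints bare PIDs, no header ),
# then kill the parent followed by its children
token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)"
kill "${ACCESS_TOKEN_SERVICE_PID}"
for pid in ${token_service_pids}; do kill "${pid}"; done
```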
### Installation
diff --git a/bash/drive-utils.bash b/bash/drive-utils.bash
index f928cd1..71c067e 100755
--- a/bash/drive-utils.bash
+++ b/bash/drive-utils.bash
@@ -1,4 +1,16 @@
#!/usr/bin/env bash
+# shellcheck source=/dev/null
+
+###################################################
+# A simple wrapper to source the access token from the tempfile and make authorized OAuth requests to the Drive API
+###################################################
+_api_request() {
+ . "${TMPFILE}_ACCESS_TOKEN"
+
+ curl --compressed \
+ -H "Authorization: Bearer ${ACCESS_TOKEN}" \
+ "${@}"
+}
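A minimal usage sketch for the wrapper ( the folder id is a placeholder; the real callers below simply pass their own curl options and URL, which `_api_request` forwards to curl via `"${@}"` ):

```shell
# fetch the name of a file/folder, reusing the token exported to "${TMPFILE}_ACCESS_TOKEN"
# by the background refresh service started in _check_credentials
folder_id="root" # placeholder id
response="$(_api_request -s \
    "${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=name&supportsAllDrives=true" || :)"
printf "%s\n" "${response}" | _json_value name 1 1
```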
###################################################
# Method to regenerate access_token ( also updates in config ).
@@ -12,8 +24,7 @@ _get_access_token_and_update() {
RESPONSE="${1:-$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || :
if ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"; then
_update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
- { ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)" &&
- _update_config ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"; } || { "${QUIET:-_print_center}" "justify" "Error: Couldn't update" " access token expiry." 1>&2 && return 1; }
+ _update_config ACCESS_TOKEN_EXPIRY "$(($(printf "%(%s)T\\n" "-1") + $(_json_value expires_in 1 1 <<< "${RESPONSE}") - 1))" "${CONFIG}"
else
"${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." 1>&2
printf "%s\n" "${RESPONSE}" 1>&2
@@ -74,13 +85,12 @@ _error_logging_upload() {
###################################################
# Get information for a gdrive folder/file.
-# Globals: 2 variables, 1 function
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 1 function
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = folder/file gdrive id
# ${2} = information to fetch, e.g name, id
-# ${3} = Access Token
# Result: On
# Success - print fetched value
# Error - print "message" field from the json
@@ -88,13 +98,11 @@ _error_logging_upload() {
# https://developers.google.com/drive/api/v3/search-files
###################################################
_drive_info() {
- [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare folder_id="${1}" fetch="${2}" token="${3}"
- declare search_response
+ [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare folder_id="${1}" fetch="${2}" search_response
"${EXTRA_LOG}" "justify" "Fetching info.." "-" 1>&2
- search_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token}" \
+ search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=${fetch}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -104,27 +112,24 @@ _drive_info() {
###################################################
# Search for an existing file on gdrive with write permission.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = file name
# ${2} = root dir id of file
-# ${3} = Access Token
# Result: print file id else blank
# Reference:
# https://developers.google.com/drive/api/v3/search-files
###################################################
_check_existing_file() {
- [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare name="${1##*/}" rootdir="${2}" token="${3}"
- declare query search_response id
+ [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare name="${1##*/}" rootdir="${2}" query search_response id
"${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2
query="$(_url_encode "name='${name}' and '${rootdir}' in parents and trashed=false")"
- search_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token}" \
+ search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -134,35 +139,31 @@ _check_existing_file() {
###################################################
# Create/Check directory in google drive.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = dir name
# ${2} = root dir id of given dir
-# ${3} = Access Token
# Result: print folder id
# Reference:
# https://developers.google.com/drive/api/v3/folder
###################################################
_create_directory() {
- [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare dirname="${1##*/}" rootdir="${2}" token="${3}"
- declare query search_response folder_id
+ [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare dirname="${1##*/}" rootdir="${2}" query search_response folder_id
"${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${dirname}" "-" 1>&2
query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname}' and trashed=false and '${rootdir}' in parents")"
- search_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token}" \
+ search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
if ! folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)"; then
declare create_folder_post_data create_folder_response
create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname}\",\"parents\": [\"${rootdir}\"]}"
- create_folder_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ create_folder_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${create_folder_post_data}" \
"${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
@@ -179,9 +180,8 @@ _create_directory() {
# generate resumable upload link
_generate_upload_link() {
"${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2
- uploadlink="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ uploadlink="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X "${request_method}" \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-H "X-Upload-Content-Type: ${mime_type}" \
-H "X-Upload-Content-Length: ${inputsize}" \
@@ -202,9 +202,8 @@ _generate_upload_link() {
_upload_file_from_uri() {
_print_center "justify" "Uploading.." "-"
# shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic.
- upload_body="$(curl --compressed ${CURL_PROGRESS} \
+ upload_body="$(_api_request ${CURL_PROGRESS} \
-X PUT \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: ${mime_type}" \
-H "Content-Length: ${content_length}" \
-H "Slug: ${slug}" \
@@ -252,16 +251,15 @@ _full_upload() {
###################################################
# Upload ( Create/Update ) files on gdrive.
# Interrupted uploads can be resumed.
-# Globals: 7 variables, 10 functions
-# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID
+# Globals: 8 variables, 10 functions
+# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _bytes_to_human
# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session
# _full_upload, _collect_file_info
-# Arguments: 5
+# Arguments: 3
# ${1} = update or upload ( upload type )
# ${2} = file to upload
# ${3} = root dir id for file
-# ${4} = Access Token
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -271,9 +269,9 @@ _full_upload() {
# https://developers.google.com/drive/api/v3/reference/files/update
###################################################
_upload_file() {
- [[ $# -lt 4 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare job="${1}" input="${2}" folder_id="${3}" token="${4}"
- declare slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type resume_args1 resume_args2 resume_args3
+ [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare job="${1}" input="${2}" folder_id="${3}" \
+ slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type resume_args1 resume_args2 resume_args3
slug="${input##*/}"
inputname="${slug%.*}"
@@ -295,7 +293,7 @@ _upload_file() {
[[ ${job} = update ]] && {
declare file_check_json
# Check if file actually exists, and create if not.
- if file_check_json="$(_check_existing_file "${slug}" "${folder_id}" "${token}")"; then
+ if file_check_json="$(_check_existing_file "${slug}" "${folder_id}")"; then
if [[ -n ${SKIP_DUPLICATES} ]]; then
# Stop upload if already exists ( -d/--skip-duplicates )
_collect_file_info "${file_check_json}" "${slug}" || return 1
@@ -367,8 +365,8 @@ _upload_file() {
###################################################
# A extra wrapper for _upload_file function to properly handle retries
# also handle uploads in case uploading from folder
-# Globals: 3 variables, 1 function
-# Variables - RETRY, UPLOAD_MODE and ACCESS_TOKEN
+# Globals: 2 variables, 1 function
+# Variables - RETRY, UPLOAD_MODE
# Functions - _upload_file
# Arguments: 3
# ${1} = parse or norparse
@@ -384,9 +382,9 @@ _upload_file_main() {
retry="${RETRY:-0}" && unset RETURN_STATUS
until [[ ${retry} -le 0 ]] && [[ -n ${RETURN_STATUS} ]]; do
if [[ -n ${4} ]]; then
- _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" "${ACCESS_TOKEN}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
else
- _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" "${ACCESS_TOKEN}" && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" && RETURN_STATUS=1 && break
fi
RETURN_STATUS=2 retry="$((retry - 1))" && continue
done
@@ -396,8 +394,8 @@ _upload_file_main() {
###################################################
# Upload all files in the given folder, parallelly or non-parallely and show progress
-# Globals: 2 variables, 3 functions
-# Variables - VERBOSE and VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
+# Globals: 7 variables, 4 functions
+# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
# Functions - _clear_line, _newline, _print_center and _upload_file_main
# Arguments: 4
# ${1} = parallel or normal
@@ -456,16 +454,15 @@ _upload_folder() {
###################################################
# Copy/Clone a public gdrive file/folder from another/same gdrive account
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET
+# Globals: 6 variables, 5 functions
+# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN
# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line
# Arguments: 5
# ${1} = update or upload ( upload type )
# ${2} = file id to upload
# ${3} = root dir id for file
-# ${4} = Access Token
-# ${5} = name of file
-# ${6} = size of file
+# ${4} = name of file
+# ${5} = size of file
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -473,8 +470,8 @@ _upload_folder() {
# https://developers.google.com/drive/api/v2/reference/files/copy
###################################################
_clone_file() {
- [[ $# -lt 4 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare job="${1}" file_id="${2}" file_root_id="${3}" token="${4}" name="${5}" size="${6}"
+ [[ $# -lt 5 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare job="${1}" file_id="${2}" file_root_id="${3}" name="${4}" size="${5}"
declare clone_file_post_data clone_file_response readable_size _file_id && STRING="Cloned"
clone_file_post_data="{\"parents\": [\"${file_root_id}\"]}"
readable_size="$(_bytes_to_human "${size}")"
@@ -484,7 +481,7 @@ _clone_file() {
if [[ ${job} = update ]]; then
declare file_check_json
# Check if file actually exists.
- if file_check_json="$(_check_existing_file "${name}" "${file_root_id}" "${token}")"; then
+ if file_check_json="$(_check_existing_file "${name}" "${file_root_id}")"; then
if [[ -n ${SKIP_DUPLICATES} ]]; then
_collect_file_info "${file_check_json}" || return 1
_clear_line 1
@@ -492,12 +489,11 @@ _clone_file() {
else
_print_center "justify" "Overwriting file.." "-"
{ _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" &&
- clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare" "${token}")"; } ||
+ clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare")"; } ||
{ _error_logging_upload "${name}" "${post_data:-${file_check_json}}" && return 1; }
if [[ ${_file_id} != "${file_id}" ]]; then
- curl --compressed -s \
+ _api_request -s \
-X DELETE \
- -H "Authorization: Bearer ${token}" \
"${API_URL}/drive/${API_VERSION}/files/${_file_id}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || :
STRING="Updated"
else
@@ -512,9 +508,8 @@ _clone_file() {
fi
# shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic.
- clone_file_response="$(curl --compressed ${CURL_PROGRESS} \
+ clone_file_response="$(_api_request ${CURL_PROGRESS} \
-X POST \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${clone_file_post_data}" \
"${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)"
@@ -526,28 +521,26 @@ _clone_file() {
###################################################
# Share a gdrive file/folder
-# Globals: 2 variables, 4 functions
-# Variables - API_URL and API_VERSION
+# Globals: 3 variables, 4 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _clear_line
-# Arguments: 3
+# Arguments: 2
# ${1} = gdrive ID of folder/file
-# ${2} = Access Token
-# ${3} = Email to which file will be shared ( optional )
+# ${2} = Email to which file will be shared ( optional )
# Result: read description
# Reference:
# https://developers.google.com/drive/api/v3/manage-sharing
###################################################
_share_id() {
[[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare id="${1}" token="${2}" share_email="${3}" role="reader" type="${share_email:+user}"
+ declare id="${1}" share_email="${2}" role="reader" type="${share_email:+user}"
declare type share_post_data share_post_data share_response
"${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2
share_post_data="{\"role\":\"${role}\",\"type\":\"${type:-anyone}\"${share_email:+,\\\"emailAddress\\\":\\\"${share_email}\\\"}}"
- share_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ share_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${share_post_data}" \
"${API_URL}/drive/${API_VERSION}/files/${id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
diff --git a/bash/google-oauth2.bash b/bash/google-oauth2.bash
index c3de1d9..b02e870 100755
--- a/bash/google-oauth2.bash
+++ b/bash/google-oauth2.bash
@@ -25,16 +25,21 @@ Usage:
exit 0
}
+###################################################
# Method to regenerate access_token ( also updates in config ).
# Make a request on https://www.googleapis.com/oauth2/""${API_VERSION}""/tokeninfo?access_token=${ACCESS_TOKEN} url and check if the given token is valid, if not generate one.
-# Requirements: Refresh Token
-_get_token_and_update() {
+# Globals: 8 variables, 2 functions
+# Variables - CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, TOKEN_URL, CONFIG, API_URL, API_VERSION and QUIET
+# Functions - _update_config and _print_center
+# Result: Update access_token and expiry else print error
+###################################################
+_get_access_token_and_update() {
RESPONSE="${1:-$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || :
if ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"; then
- { [[ -n ${UPDATE} ]] && ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)" &&
- _update_config ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"; } || { "${QUIET:-_print_center}" "justify" "Error: Couldn't update" " access token expiry." 1>&2 && exit 1; }
- "${UPDATE:-:}" ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
- "${UPDATE:-:}" ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"
+ [[ -n ${UPDATE} ]] && {
+ _update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
+ _update_config ACCESS_TOKEN_EXPIRY "$(($(printf "%(%s)T\\n" "-1") + $(_json_value expires_in 1 1 <<< "${RESPONSE}")))" "${CONFIG}"
+ }
else
_print_center "justify" "Error: Something went wrong" ", printing error." 1>&2
printf "%s\n" "${RESPONSE}" 1>&2
@@ -45,10 +50,10 @@ _get_token_and_update() {
[[ ${1} = create ]] || [[ ${1} = refresh ]] || _short_help
-[[ ${2} = update ]] && UPDATE="_update_config"
+{ [[ ${2} = update ]] && UPDATE="true"; } || unset UPDATE
UTILS_FOLDER="${UTILS_FOLDER:-$(pwd)}"
-{ . "${UTILS_FOLDER}"/common-utils.sh; } || { printf "Error: Unable to source util files.\n" && exit 1; }
+{ . "${UTILS_FOLDER}"/common-utils.bash; } || { printf "Error: Unable to source util files.\n" && exit 1; }
_check_debug
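With `${2}` now acting as a plain flag, a typical invocation looks like this ( assuming the script is run from the repo's bash directory and the config already contains CLIENT_ID, CLIENT_SECRET and REFRESH_TOKEN ):

```shell
# refresh the access token and write ACCESS_TOKEN / ACCESS_TOKEN_EXPIRY back to the config
bash google-oauth2.bash refresh update

# only print a fresh access token, leaving the config untouched
bash google-oauth2.bash refresh
```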
@@ -68,7 +73,7 @@ CONFIG="${CONFIG:-${HOME}/.googledrive.conf}"
# shellcheck source=/dev/null
[[ -f ${CONFIG} ]] && source "${CONFIG}"
-! _is_terminal && [[ -z ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && {
+! [[ -t 2 ]] && [[ -z ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && {
printf "%s\n" "Error: Script is not running in a terminal, cannot ask for credentials."
printf "%s\n" "Add in config manually if terminal is not accessible. CLIENT_ID, CLIENT_SECRET and REFRESH_TOKEN is required." && return 1
}
@@ -103,7 +108,7 @@ if [[ ${1} = create ]]; then
--data "code=${CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")" || :
REFRESH_TOKEN="$(_json_value refresh_token 1 1 <<< "${RESPONSE}" || :)"
- if _get_token_and_update "${RESPONSE}"; then
+ if _get_access_token_and_update "${RESPONSE}"; then
"${UPDATE:-:}" REFRESH_TOKEN "${REFRESH_TOKEN}" "${CONFIG}"
printf "Access Token: %s\n" "${ACCESS_TOKEN}"
printf "Refresh Token: %s\n" "${REFRESH_TOKEN}"
@@ -111,7 +116,7 @@ if [[ ${1} = create ]]; then
elif [[ ${1} = refresh ]]; then
if [[ -n ${REFRESH_TOKEN} ]]; then
_print_center "justify" "Required credentials set." "="
- _get_token_and_update
+ _get_access_token_and_update
_clear_line 1
printf "Access Token: %s\n" "${ACCESS_TOKEN}"
else
diff --git a/bash/release/gupload b/bash/release/gupload
index 5cf2d52..d42c3ab 100755
--- a/bash/release/gupload
+++ b/bash/release/gupload
@@ -371,6 +371,18 @@ _url_encode() {
done
printf '\n'
}
+# shellcheck source=/dev/null
+
+###################################################
+# A simple wrapper to source the access token from the tempfile and make authorized OAuth requests to the Drive API
+###################################################
+_api_request() {
+ . "${TMPFILE}_ACCESS_TOKEN"
+
+ curl --compressed \
+ -H "Authorization: Bearer ${ACCESS_TOKEN}" \
+ "${@}"
+}
###################################################
# Method to regenerate access_token ( also updates in config ).
@@ -384,8 +396,7 @@ _get_access_token_and_update() {
RESPONSE="${1:-$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || :
if ACCESS_TOKEN="$(_json_value access_token 1 1 <<< "${RESPONSE}")"; then
_update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
- { ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)" &&
- _update_config ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"; } || { "${QUIET:-_print_center}" "justify" "Error: Couldn't update" " access token expiry." 1>&2 && return 1; }
+ _update_config ACCESS_TOKEN_EXPIRY "$(($(printf "%(%s)T\\n" "-1") + $(_json_value expires_in 1 1 <<< "${RESPONSE}") - 1))" "${CONFIG}"
else
"${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." 1>&2
printf "%s\n" "${RESPONSE}" 1>&2
@@ -446,13 +457,12 @@ _error_logging_upload() {
###################################################
# Get information for a gdrive folder/file.
-# Globals: 2 variables, 1 function
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 1 function
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = folder/file gdrive id
# ${2} = information to fetch, e.g name, id
-# ${3} = Access Token
# Result: On
# Success - print fetched value
# Error - print "message" field from the json
@@ -460,13 +470,11 @@ _error_logging_upload() {
# https://developers.google.com/drive/api/v3/search-files
###################################################
_drive_info() {
- [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare folder_id="${1}" fetch="${2}" token="${3}"
- declare search_response
+ [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare folder_id="${1}" fetch="${2}" search_response
"${EXTRA_LOG}" "justify" "Fetching info.." "-" 1>&2
- search_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token}" \
+ search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files/${folder_id}?fields=${fetch}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -476,27 +484,24 @@ _drive_info() {
###################################################
# Search for an existing file on gdrive with write permission.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = file name
# ${2} = root dir id of file
-# ${3} = Access Token
# Result: print file id else blank
# Reference:
# https://developers.google.com/drive/api/v3/search-files
###################################################
_check_existing_file() {
- [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare name="${1##*/}" rootdir="${2}" token="${3}"
- declare query search_response id
+ [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare name="${1##*/}" rootdir="${2}" query search_response id
"${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2
query="$(_url_encode "name='${name}' and '${rootdir}' in parents and trashed=false")"
- search_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token}" \
+ search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -506,35 +511,31 @@ _check_existing_file() {
###################################################
# Create/Check directory in google drive.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = dir name
# ${2} = root dir id of given dir
-# ${3} = Access Token
# Result: print folder id
# Reference:
# https://developers.google.com/drive/api/v3/folder
###################################################
_create_directory() {
- [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare dirname="${1##*/}" rootdir="${2}" token="${3}"
- declare query search_response folder_id
+ [[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare dirname="${1##*/}" rootdir="${2}" query search_response folder_id
"${EXTRA_LOG}" "justify" "Creating gdrive folder:" " ${dirname}" "-" 1>&2
query="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname}' and trashed=false and '${rootdir}' in parents")"
- search_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token}" \
+ search_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
if ! folder_id="$(printf "%s\n" "${search_response}" | _json_value id 1 1)"; then
declare create_folder_post_data create_folder_response
create_folder_post_data="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname}\",\"parents\": [\"${rootdir}\"]}"
- create_folder_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ create_folder_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${create_folder_post_data}" \
"${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
@@ -551,9 +552,8 @@ _create_directory() {
# generate resumable upload link
_generate_upload_link() {
"${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2
- uploadlink="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ uploadlink="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X "${request_method}" \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-H "X-Upload-Content-Type: ${mime_type}" \
-H "X-Upload-Content-Length: ${inputsize}" \
@@ -574,9 +574,8 @@ _generate_upload_link() {
_upload_file_from_uri() {
_print_center "justify" "Uploading.." "-"
# shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic.
- upload_body="$(curl --compressed ${CURL_PROGRESS} \
+ upload_body="$(_api_request ${CURL_PROGRESS} \
-X PUT \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: ${mime_type}" \
-H "Content-Length: ${content_length}" \
-H "Slug: ${slug}" \
@@ -624,16 +623,15 @@ _full_upload() {
###################################################
# Upload ( Create/Update ) files on gdrive.
# Interrupted uploads can be resumed.
-# Globals: 7 variables, 10 functions
-# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID
+# Globals: 8 variables, 10 functions
+# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _bytes_to_human
# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session
# _full_upload, _collect_file_info
-# Arguments: 5
+# Arguments: 3
# ${1} = update or upload ( upload type )
# ${2} = file to upload
# ${3} = root dir id for file
-# ${4} = Access Token
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -643,9 +641,9 @@ _full_upload() {
# https://developers.google.com/drive/api/v3/reference/files/update
###################################################
_upload_file() {
- [[ $# -lt 4 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare job="${1}" input="${2}" folder_id="${3}" token="${4}"
- declare slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type resume_args1 resume_args2 resume_args3
+ [[ $# -lt 3 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare job="${1}" input="${2}" folder_id="${3}" \
+ slug inputname extension inputsize readable_size request_method url postdata uploadlink upload_body mime_type resume_args1 resume_args2 resume_args3
slug="${input##*/}"
inputname="${slug%.*}"
@@ -667,7 +665,7 @@ _upload_file() {
[[ ${job} = update ]] && {
declare file_check_json
# Check if file actually exists, and create if not.
- if file_check_json="$(_check_existing_file "${slug}" "${folder_id}" "${token}")"; then
+ if file_check_json="$(_check_existing_file "${slug}" "${folder_id}")"; then
if [[ -n ${SKIP_DUPLICATES} ]]; then
# Stop upload if already exists ( -d/--skip-duplicates )
_collect_file_info "${file_check_json}" "${slug}" || return 1
@@ -739,8 +737,8 @@ _upload_file() {
###################################################
# A extra wrapper for _upload_file function to properly handle retries
# also handle uploads in case uploading from folder
-# Globals: 3 variables, 1 function
-# Variables - RETRY, UPLOAD_MODE and ACCESS_TOKEN
+# Globals: 2 variables, 1 function
+# Variables - RETRY, UPLOAD_MODE
# Functions - _upload_file
# Arguments: 3
# ${1} = parse or norparse
@@ -756,9 +754,9 @@ _upload_file_main() {
retry="${RETRY:-0}" && unset RETURN_STATUS
until [[ ${retry} -le 0 ]] && [[ -n ${RETURN_STATUS} ]]; do
if [[ -n ${4} ]]; then
- _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" "${ACCESS_TOKEN}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
else
- _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" "${ACCESS_TOKEN}" && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file}" "${dirid}" && RETURN_STATUS=1 && break
fi
RETURN_STATUS=2 retry="$((retry - 1))" && continue
done
@@ -768,8 +766,8 @@ _upload_file_main() {
###################################################
# Upload all files in the given folder, parallelly or non-parallely and show progress
-# Globals: 2 variables, 3 functions
-# Variables - VERBOSE and VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
+# Globals: 7 variables, 4 functions
+# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
# Functions - _clear_line, _newline, _print_center and _upload_file_main
# Arguments: 4
# ${1} = parallel or normal
@@ -828,16 +826,15 @@ _upload_folder() {
###################################################
# Copy/Clone a public gdrive file/folder from another/same gdrive account
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET
+# Globals: 6 variables, 5 functions
+# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN
# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line
# Arguments: 5
# ${1} = update or upload ( upload type )
# ${2} = file id to upload
# ${3} = root dir id for file
-# ${4} = Access Token
-# ${5} = name of file
-# ${6} = size of file
+# ${4} = name of file
+# ${5} = size of file
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -845,8 +842,8 @@ _upload_folder() {
# https://developers.google.com/drive/api/v2/reference/files/copy
###################################################
_clone_file() {
- [[ $# -lt 4 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare job="${1}" file_id="${2}" file_root_id="${3}" token="${4}" name="${5}" size="${6}"
+ [[ $# -lt 5 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
+ declare job="${1}" file_id="${2}" file_root_id="${3}" name="${4}" size="${5}"
declare clone_file_post_data clone_file_response readable_size _file_id && STRING="Cloned"
clone_file_post_data="{\"parents\": [\"${file_root_id}\"]}"
readable_size="$(_bytes_to_human "${size}")"
@@ -856,7 +853,7 @@ _clone_file() {
if [[ ${job} = update ]]; then
declare file_check_json
# Check if file actually exists.
- if file_check_json="$(_check_existing_file "${name}" "${file_root_id}" "${token}")"; then
+ if file_check_json="$(_check_existing_file "${name}" "${file_root_id}")"; then
if [[ -n ${SKIP_DUPLICATES} ]]; then
_collect_file_info "${file_check_json}" || return 1
_clear_line 1
@@ -864,12 +861,11 @@ _clone_file() {
else
_print_center "justify" "Overwriting file.." "-"
{ _file_id="$(_json_value id 1 1 <<< "${file_check_json}")" &&
- clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare" "${token}")"; } ||
+ clone_file_post_data="$(_drive_info "${_file_id}" "parents,writersCanShare")"; } ||
{ _error_logging_upload "${name}" "${post_data:-${file_check_json}}" && return 1; }
if [[ ${_file_id} != "${file_id}" ]]; then
- curl --compressed -s \
+ _api_request -s \
-X DELETE \
- -H "Authorization: Bearer ${token}" \
"${API_URL}/drive/${API_VERSION}/files/${_file_id}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || :
STRING="Updated"
else
@@ -884,9 +880,8 @@ _clone_file() {
fi
# shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic.
- clone_file_response="$(curl --compressed ${CURL_PROGRESS} \
+ clone_file_response="$(_api_request ${CURL_PROGRESS} \
-X POST \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${clone_file_post_data}" \
"${API_URL}/drive/${API_VERSION}/files/${file_id}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)"
@@ -898,28 +893,26 @@ _clone_file() {
###################################################
# Share a gdrive file/folder
-# Globals: 2 variables, 4 functions
-# Variables - API_URL and API_VERSION
+# Globals: 3 variables, 4 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _clear_line
-# Arguments: 3
+# Arguments: 2
# ${1} = gdrive ID of folder/file
-# ${2} = Access Token
-# ${3} = Email to which file will be shared ( optional )
+# ${2} = Email to which file will be shared ( optional )
# Result: read description
# Reference:
# https://developers.google.com/drive/api/v3/manage-sharing
###################################################
_share_id() {
[[ $# -lt 2 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
- declare id="${1}" token="${2}" share_email="${3}" role="reader" type="${share_email:+user}"
+ declare id="${1}" share_email="${2}" role="reader" type="${share_email:+user}"
declare type share_post_data share_post_data share_response
"${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2
share_post_data="{\"role\":\"${role}\",\"type\":\"${type:-anyone}\"${share_email:+,\\\"emailAddress\\\":\\\"${share_email}\\\"}}"
- share_response="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ share_response="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${share_post_data}" \
"${API_URL}/drive/${API_VERSION}/files/${id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
@@ -1296,15 +1289,40 @@ _check_credentials() {
fi
}
- [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY} -lt "$(printf "%(%s)T\\n" "-1")" ]] && { _get_access_token_and_update || return 1; }
+ [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY:-0} -lt "$(printf "%(%s)T\\n" "-1")" ]] && { _get_access_token_and_update || return 1; }
+
+ # launch a background service to check access token and update it
+ # checks ACCESS_TOKEN_EXPIRY and tries to refresh the token 5 mins before expiry ( a fresh token is valid for 60 mins )
+ # process will be killed when script exits
+ {
+ while :; do
+ # write the access token to the tmpfile so that every function can source it and get the access_token value
+ printf "%s\n" "export ACCESS_TOKEN=\"${ACCESS_TOKEN}\"" >| "${TMPFILE}_ACCESS_TOKEN"
+ CURRENT_TIME="$(printf "%(%s)T\\n" "-1")"
+ REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))"
+ if [[ ${REMAINING_TOKEN_TIME} -le 300 ]]; then
+ # timeout after 30 seconds, it shouldn't take too long anyway
+ _timeout 30 _get_access_token_and_update || :
+ else
+ TOKEN_PROCESS_TIME_TO_SLEEP="$(if [[ ${REMAINING_TOKEN_TIME} -le 301 ]]; then
+ printf "0\n"
+ else
+ printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))"
+ fi)"
+ sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}"
+ fi
+ sleep 1
+ done
+ } &
+ ACCESS_TOKEN_SERVICE_PID="${!}"
return 0
}
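To trace the refresh loop above with concrete ( hypothetical ) epochs: a token with a full hour left makes the service sleep until roughly 5 minutes remain, then refresh and rewrite the tempfile.

```shell
ACCESS_TOKEN_EXPIRY=1600003600 CURRENT_TIME=1600000000
REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))" # 3600, so the else branch is taken
TOKEN_PROCESS_TIME_TO_SLEEP="$((REMAINING_TOKEN_TIME - 300))"  # sleep 3300 seconds
# next iteration: ~300 seconds remain, so _timeout 30 _get_access_token_and_update refreshes the token
```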
###################################################
# Setup root directory where all file/folders will be uploaded/updated
-# Globals: 6 variables, 5 functions
-# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET, ACCESS_TOKEN
+# Globals: 5 variables, 5 functions
+# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET
# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value
# Arguments: 1
# ${1} = Positive integer ( amount of time in seconds to sleep )
@@ -1317,7 +1335,7 @@ _check_credentials() {
_setup_root_dir() {
_check_root_id() {
declare json rootid
- json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id" "${ACCESS_TOKEN}")"
+ json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")"
if ! rootid="$(_json_value id 1 1 <<< "${json}")"; then
{ [[ ${json} =~ "File not found" ]] && "${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." "=" 1>&2; } || {
printf "%s\n" "${json}" 1>&2
@@ -1329,7 +1347,7 @@ _setup_root_dir() {
return 0
}
_check_root_id_name() {
- ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" "${ACCESS_TOKEN}" | _json_value name || :)"
+ ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name || :)"
"${1:-:}" ROOT_FOLDER_NAME "${ROOT_FOLDER_NAME}" "${CONFIG}"
return 0
}
@@ -1353,8 +1371,8 @@ _setup_root_dir() {
# Setup Workspace folder
# Check if the given folder exists in google drive.
# If not then the folder is created in google drive under the configured root folder.
-# Globals: 3 variables, 3 functions
-# Variables - FOLDERNAME, ROOT_FOLDER, ACCESS_TOKEN
+# Globals: 2 variables, 3 functions
+# Variables - FOLDERNAME, ROOT_FOLDER
# Functions - _create_directory, _drive_info, _json_value
# Arguments: None
# Result: Read Description
@@ -1364,9 +1382,9 @@ _setup_workspace() {
WORKSPACE_FOLDER_ID="${ROOT_FOLDER}"
WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}"
else
- WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}" "${ACCESS_TOKEN}")" ||
+ WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; }
- WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name "${ACCESS_TOKEN}" | _json_value name 1 1)" ||
+ WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; }
fi
return 0
@@ -1389,16 +1407,16 @@ _setup_workspace() {
###################################################
_process_arguments() {
export API_URL API_VERSION TOKEN_URL ACCESS_TOKEN \
- LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY UTILS_FOLDER \
+ LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY UTILS_FOLDER TMPFILE \
QUIET VERBOSE VERBOSE_PROGRESS CURL_PROGRESS CURL_PROGRESS_EXTRA CURL_PROGRESS_EXTRA_CLEAR COLUMNS EXTRA_LOG PARALLEL_UPLOAD
export -f _bytes_to_human _dirname _json_value _url_encode _support_ansi_escapes _newline _print_center_quiet _print_center _clear_line \
- _get_access_token_and_update _check_existing_file _upload_file _upload_file_main _clone_file _collect_file_info _generate_upload_link _upload_file_from_uri _full_upload \
+ _api_request _get_access_token_and_update _check_existing_file _upload_file _upload_file_main _clone_file _collect_file_info _generate_upload_link _upload_file_from_uri _full_upload \
_normal_logging_upload _error_logging_upload _log_upload_session _remove_upload_session _upload_folder _share_id _get_rootdir_id
# on successful uploads
_share_and_print_link() {
- "${SHARE:-:}" "${1:-}" "${ACCESS_TOKEN}" "${SHARE_EMAIL}"
+ "${SHARE:-:}" "${1:-}" "${SHARE_EMAIL}"
[[ -z ${HIDE_INFO} ]] && {
_print_center "justify" "DriveLink" "${SHARE:+ (SHARED)}" "-"
_support_ansi_escapes && [[ ${COLUMNS} -gt 45 ]] && _print_center "normal" "↓ ↓ ↓" ' '
@@ -1452,7 +1470,7 @@ _process_arguments() {
"${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n"
"${EXTRA_LOG}" "justify" "Creating folder.." "-"
- { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" && export ID; } ||
+ { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
_clear_line 1 && DIRIDS="${ID}"
@@ -1476,7 +1494,7 @@ _process_arguments() {
NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}"
NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2
- ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" ||
+ ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
# Store sub-folder directory IDs and it's path for later use.
@@ -1518,7 +1536,7 @@ _process_arguments() {
{ [[ ${Aseen[${gdrive_id}]} ]] && continue; } || Aseen[${gdrive_id}]=x
_print_center "justify" "Given Input" ": ID" "="
"${EXTRA_LOG}" "justify" "Checking if id exists.." "-"
- json="$(_drive_info "${gdrive_id}" "name,mimeType,size" "${ACCESS_TOKEN}" || :)"
+ json="$(_drive_info "${gdrive_id}" "name,mimeType,size" || :)"
if ! _json_value code 1 1 <<< "${json}" 2>| /dev/null 1>&2; then
type="$(_json_value mimeType 1 1 <<< "${json}" || :)"
name="$(_json_value name 1 1 <<< "${json}" || :)"
@@ -1530,7 +1548,7 @@ _process_arguments() {
else
_print_center "justify" "Given Input" ": File ID" "="
_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
- _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${ACCESS_TOKEN}" "${name}" "${size}" ||
+ _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" ||
{ for _ in 1 2; do _clear_line 1; done && continue; }
fi
_share_and_print_link "${FILE_ID}"
@@ -1557,13 +1575,22 @@ main() {
_setup_arguments "${@}"
"${SKIP_INTERNET_CHECK:-_check_internet}"
- [[ -n ${PARALLEL_UPLOAD} ]] && {
- { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="${PWD}/$(printf "%(%s)T\\n" "-1").LOG"
- }
+ # create tempfile
+ { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="${PWD}/$(printf "%(%s)T\\n" "-1").LOG"
_cleanup() {
{
- [[ -n ${PARALLEL_UPLOAD} ]] && rm -f "${TMPFILE:?}"*
+ rm -f "${TMPFILE:?}"*
+
+ # grab all child processes of the access token service
+ # https://askubuntu.com/a/512872
+ token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)"
+ # first kill the parent process, then its child processes
+ kill "${ACCESS_TOKEN_SERVICE_PID}"
+ for pid in ${token_service_pids}; do
+ kill "${pid}"
+ done
+
export abnormal_exit && if [[ -n ${abnormal_exit} ]]; then
printf "\n\n%s\n" "Script exited manually."
kill -- -$$ &
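As a sanity check on the cleanup above: `-o pid=` suppresses the header, so the command prints only the bare PIDs of the direct children, one per line ( PID below is hypothetical; `--ppid` is a GNU procps option, which is why `ps` moved into the main dependency table ):

```shell
ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=
#  23412   <- the sleep currently run by the token-service loop
```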
diff --git a/bash/upload.bash b/bash/upload.bash
index 0f166d3..0de1c74 100755
--- a/bash/upload.bash
+++ b/bash/upload.bash
@@ -367,15 +367,40 @@ _check_credentials() {
fi
}
- [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY} -lt "$(printf "%(%s)T\\n" "-1")" ]] && { _get_access_token_and_update || return 1; }
+ [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY:-0} -lt "$(printf "%(%s)T\\n" "-1")" ]] && { _get_access_token_and_update || return 1; }
+
+ # launch a background service to check access token and update it
+ # checks ACCESS_TOKEN_EXPIRY and tries to refresh the token 5 mins before expiry ( a fresh token is valid for 60 mins )
+ # process will be killed when script exits
+ {
+ while :; do
+ # write the access token to the tmpfile so that every function can source it and get the access_token value
+ printf "%s\n" "export ACCESS_TOKEN=\"${ACCESS_TOKEN}\"" >| "${TMPFILE}_ACCESS_TOKEN"
+ CURRENT_TIME="$(printf "%(%s)T\\n" "-1")"
+ REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))"
+ if [[ ${REMAINING_TOKEN_TIME} -le 300 ]]; then
+ # timeout after 30 seconds, it shouldn't take too long anyway
+ _timeout 30 _get_access_token_and_update || :
+ else
+ TOKEN_PROCESS_TIME_TO_SLEEP="$(if [[ ${REMAINING_TOKEN_TIME} -le 301 ]]; then
+ printf "0\n"
+ else
+ printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))"
+ fi)"
+ sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}"
+ fi
+ sleep 1
+ done
+ } &
+ ACCESS_TOKEN_SERVICE_PID="${!}"
return 0
}
###################################################
# Setup root directory where all file/folders will be uploaded/updated
-# Globals: 6 variables, 5 functions
-# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET, ACCESS_TOKEN
+# Globals: 5 variables, 5 functions
+# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET
# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value
# Arguments: 1
# ${1} = Positive integer ( amount of time in seconds to sleep )
@@ -388,7 +413,7 @@ _check_credentials() {
_setup_root_dir() {
_check_root_id() {
declare json rootid
- json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id" "${ACCESS_TOKEN}")"
+ json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")"
if ! rootid="$(_json_value id 1 1 <<< "${json}")"; then
{ [[ ${json} =~ "File not found" ]] && "${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." "=" 1>&2; } || {
printf "%s\n" "${json}" 1>&2
@@ -400,7 +425,7 @@ _setup_root_dir() {
return 0
}
_check_root_id_name() {
- ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" "${ACCESS_TOKEN}" | _json_value name || :)"
+ ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name || :)"
"${1:-:}" ROOT_FOLDER_NAME "${ROOT_FOLDER_NAME}" "${CONFIG}"
return 0
}
@@ -424,8 +449,8 @@ _setup_root_dir() {
# Setup Workspace folder
# Check if the given folder exists in google drive.
# If not then the folder is created in google drive under the configured root folder.
-# Globals: 3 variables, 3 functions
-# Variables - FOLDERNAME, ROOT_FOLDER, ACCESS_TOKEN
+# Globals: 2 variables, 3 functions
+# Variables - FOLDERNAME, ROOT_FOLDER
# Functions - _create_directory, _drive_info, _json_value
# Arguments: None
# Result: Read Description
@@ -435,9 +460,9 @@ _setup_workspace() {
WORKSPACE_FOLDER_ID="${ROOT_FOLDER}"
WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}"
else
- WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}" "${ACCESS_TOKEN}")" ||
+ WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; }
- WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name "${ACCESS_TOKEN}" | _json_value name 1 1)" ||
+ WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; }
fi
return 0
@@ -460,16 +485,16 @@ _setup_workspace() {
###################################################
_process_arguments() {
export API_URL API_VERSION TOKEN_URL ACCESS_TOKEN \
- LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY UTILS_FOLDER \
+ LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY UTILS_FOLDER TMPFILE \
QUIET VERBOSE VERBOSE_PROGRESS CURL_PROGRESS CURL_PROGRESS_EXTRA CURL_PROGRESS_EXTRA_CLEAR COLUMNS EXTRA_LOG PARALLEL_UPLOAD
export -f _bytes_to_human _dirname _json_value _url_encode _support_ansi_escapes _newline _print_center_quiet _print_center _clear_line \
- _get_access_token_and_update _check_existing_file _upload_file _upload_file_main _clone_file _collect_file_info _generate_upload_link _upload_file_from_uri _full_upload \
+ _api_request _get_access_token_and_update _check_existing_file _upload_file _upload_file_main _clone_file _collect_file_info _generate_upload_link _upload_file_from_uri _full_upload \
_normal_logging_upload _error_logging_upload _log_upload_session _remove_upload_session _upload_folder _share_id _get_rootdir_id
# on successful uploads
_share_and_print_link() {
- "${SHARE:-:}" "${1:-}" "${ACCESS_TOKEN}" "${SHARE_EMAIL}"
+ "${SHARE:-:}" "${1:-}" "${SHARE_EMAIL}"
[[ -z ${HIDE_INFO} ]] && {
_print_center "justify" "DriveLink" "${SHARE:+ (SHARED)}" "-"
_support_ansi_escapes && [[ ${COLUMNS} -gt 45 ]] && _print_center "normal" "↓ ↓ ↓" ' '
@@ -523,7 +548,7 @@ _process_arguments() {
"${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n"
"${EXTRA_LOG}" "justify" "Creating folder.." "-"
- { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" && export ID; } ||
+ { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
_clear_line 1 && DIRIDS="${ID}"
@@ -547,7 +572,7 @@ _process_arguments() {
NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}"
NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2
- ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" ||
+ ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
# Store sub-folder directory IDs and it's path for later use.
@@ -589,7 +614,7 @@ _process_arguments() {
{ [[ ${Aseen[${gdrive_id}]} ]] && continue; } || Aseen[${gdrive_id}]=x
_print_center "justify" "Given Input" ": ID" "="
"${EXTRA_LOG}" "justify" "Checking if id exists.." "-"
- json="$(_drive_info "${gdrive_id}" "name,mimeType,size" "${ACCESS_TOKEN}" || :)"
+ json="$(_drive_info "${gdrive_id}" "name,mimeType,size" || :)"
if ! _json_value code 1 1 <<< "${json}" 2>| /dev/null 1>&2; then
type="$(_json_value mimeType 1 1 <<< "${json}" || :)"
name="$(_json_value name 1 1 <<< "${json}" || :)"
@@ -601,7 +626,7 @@ _process_arguments() {
else
_print_center "justify" "Given Input" ": File ID" "="
_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
- _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${ACCESS_TOKEN}" "${name}" "${size}" ||
+ _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" ||
{ for _ in 1 2; do _clear_line 1; done && continue; }
fi
_share_and_print_link "${FILE_ID}"
@@ -628,13 +653,22 @@ main() {
_setup_arguments "${@}"
"${SKIP_INTERNET_CHECK:-_check_internet}"
- [[ -n ${PARALLEL_UPLOAD} ]] && {
- { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="${PWD}/$(printf "%(%s)T\\n" "-1").LOG"
- }
+ # create tempfile
+ { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="${PWD}/$(printf "%(%s)T\\n" "-1").LOG"
_cleanup() {
{
- [[ -n ${PARALLEL_UPLOAD} ]] && rm -f "${TMPFILE:?}"*
+ rm -f "${TMPFILE:?}"*
+
+ # grab all child processes of the access token service
+ # https://askubuntu.com/a/512872
+ token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)"
+ # first kill the parent process, then its child processes
+ kill "${ACCESS_TOKEN_SERVICE_PID}"
+ for pid in ${token_service_pids}; do
+ kill "${pid}"
+ done
+
export abnormal_exit && if [[ -n ${abnormal_exit} ]]; then
printf "\n\n%s\n" "Script exited manually."
kill -- -$$ &
diff --git a/sh/drive-utils.sh b/sh/drive-utils.sh
index d2523f5..8ef10b5 100755
--- a/sh/drive-utils.sh
+++ b/sh/drive-utils.sh
@@ -1,4 +1,16 @@
#!/usr/bin/env sh
+# shellcheck source=/dev/null
+
+###################################################
+# A simple wrapper to source the access token from the tempfile and make authorized OAuth requests to the Drive API
+###################################################
+_api_request() {
+ . "${TMPFILE}_ACCESS_TOKEN"
+
+ curl --compressed \
+ -H "Authorization: Bearer ${ACCESS_TOKEN}" \
+ "${@}"
+}
###################################################
# Method to regenerate access_token ( also updates in config ).
@@ -12,8 +24,7 @@ _get_access_token_and_update() {
RESPONSE="${1:-$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || :
if ACCESS_TOKEN="$(printf "%s\n" "${RESPONSE}" | _json_value access_token 1 1)"; then
_update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
- { ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)" &&
- _update_config ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"; } || { "${QUIET:-_print_center}" "normal" "Error: Couldn't update access token expiry." "-" 1>&2 && return 1; }
+ _update_config ACCESS_TOKEN_EXPIRY "$(($(date +"%s") + $(printf "%s\n" "${RESPONSE}" | _json_value expires_in 1 1) - 1))" "${CONFIG}"
else
"${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2
printf "%s\n" "${RESPONSE}" 1>&2
@@ -74,13 +85,12 @@ _error_logging_upload() {
###################################################
# Get information for a gdrive folder/file.
-# Globals: 2 variables, 1 function
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 1 function
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = folder/file gdrive id
# ${2} = information to fetch, e.g name, id
-# ${3} = Access Token
# Result: On
# Success - print fetched value
# Error - print "message" field from the json
@@ -88,13 +98,12 @@ _error_logging_upload() {
# https://developers.google.com/drive/api/v3/search-files
###################################################
_drive_info() {
- [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
- folder_id_drive_info="${1}" fetch_drive_info="${2}" token_drive_info="${3}"
+ [ $# -lt 2 ] && printf "Missing arguments\n" && return 1
+ folder_id_drive_info="${1}" fetch_drive_info="${2}"
unset search_response_drive_info
"${EXTRA_LOG}" "justify" "Fetching info.." "-" 1>&2
- search_response_drive_info="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token_drive_info}" \
+ search_response_drive_info="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files/${folder_id_drive_info}?fields=${fetch_drive_info}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -104,27 +113,25 @@ _drive_info() {
###################################################
# Search for an existing file on gdrive with write permission.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = file name
# ${2} = root dir id of file
-# ${3} = Access Token
# Result: print file id else blank
# Reference:
# https://developers.google.com/drive/api/v3/search-files
###################################################
_check_existing_file() (
- [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
- name_check_existing_file="${1##*/}" rootdir_check_existing_file="${2}" token_check_existing_file="${3}"
+ [ $# -lt 2 ] && printf "Missing arguments\n" && return 1
+ name_check_existing_file="${1##*/}" rootdir_check_existing_file="${2}"
unset query_check_existing_file response_check_existing_file id_check_existing_file
"${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2
query_check_existing_file="$(_url_encode "name='${name_check_existing_file}' and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")"
- response_check_existing_file="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token_check_existing_file}" \
+ response_check_existing_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query_check_existing_file}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -134,35 +141,32 @@ _check_existing_file() (
###################################################
# Create/Check directory in google drive.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = dir name
# ${2} = root dir id of given dir
-# ${3} = Access Token
# Result: print folder id
# Reference:
# https://developers.google.com/drive/api/v3/folder
###################################################
_create_directory() (
- [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
- dirname_create_directory="${1##*/}" rootdir_create_directory="${2}" token_create_directory="${3}"
+ [ $# -lt 2 ] && printf "Missing arguments\n" && return 1
+ dirname_create_directory="${1##*/}" rootdir_create_directory="${2}"
unset query_create_directory search_response_create_directory folder_id_create_directory
"${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${dirname_create_directory}" "-" 1>&2
query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname_create_directory}' and trashed=false and '${rootdir_create_directory}' in parents")"
- search_response_create_directory="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token_create_directory}" \
+ search_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query_create_directory}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
if ! folder_id_create_directory="$(printf "%s\n" "${search_response_create_directory}" | _json_value id 1 1)"; then
unset create_folder_post_data_create_directory create_folder_response_create_directory
create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}"
- create_folder_response_create_directory="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ create_folder_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token_create_directory}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${create_folder_post_data_create_directory}" \
"${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
@@ -179,9 +183,8 @@ _create_directory() (
# generate resumable upload link
_generate_upload_link() {
"${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2
- uploadlink_upload_file="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X "${request_method_upload_file}" \
- -H "Authorization: Bearer ${token_upload_file}" \
-H "Content-Type: application/json; charset=UTF-8" \
-H "X-Upload-Content-Type: ${mime_type_upload_file}" \
-H "X-Upload-Content-Length: ${inputsize_upload_file}" \
@@ -202,9 +205,8 @@ _generate_upload_link() {
_upload_file_from_uri() {
_print_center "justify" "Uploading.." "-"
# shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic.
- upload_body_upload_file="$(curl --compressed ${CURL_PROGRESS} \
+ upload_body_upload_file="$(_api_request ${CURL_PROGRESS} \
-X PUT \
- -H "Authorization: Bearer ${token_upload_file}" \
-H "Content-Type: ${mime_type_upload_file}" \
-H "Content-Length: ${content_length_upload_file}" \
-H "Slug: ${slug_upload_file}" \
@@ -253,16 +255,15 @@ _full_upload() {
###################################################
# Upload ( Create/Update ) files on gdrive.
# Interrupted uploads can be resumed.
-# Globals: 7 variables, 10 functions
-# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID
+# Globals: 8 variables, 10 functions
+# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _bytes_to_human
# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session
# _full_upload, _collect_file_info
-# Arguments: 5
+# Arguments: 3
# ${1} = update or upload ( upload type )
# ${2} = file to upload
# ${3} = root dir id for file
-# ${4} = Access Token
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -272,8 +273,8 @@ _full_upload() {
# https://developers.google.com/drive/api/v3/reference/files/update
###################################################
_upload_file() {
- [ $# -lt 4 ] && printf "Missing arguments\n" && return 1
- job_upload_file="${1}" input_upload_file="${2}" folder_id_upload_file="${3}" token_upload_file="${4}"
+ [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
+ job_upload_file="${1}" input_upload_file="${2}" folder_id_upload_file="${3}"
unset slug_upload_file inputname_upload_file extension_upload_file inputsize_upload_file readable_size_upload_file request_method_upload_file \
url_upload_file postdata_upload_file uploadlink_upload_file upload_body_upload_file mime_type_upload_file resume_args_upload_file
@@ -297,7 +298,7 @@ _upload_file() {
[ "${job_upload_file}" = update ] && {
unset file_check_json_upload_file
# Check if file actually exists, and create if not.
- if file_check_json_upload_file="$(_check_existing_file "${slug_upload_file}" "${folder_id_upload_file}" "${token_upload_file}")"; then
+ if file_check_json_upload_file="$(_check_existing_file "${slug_upload_file}" "${folder_id_upload_file}")"; then
if [ -n "${SKIP_DUPLICATES}" ]; then
# Stop upload if already exists ( -d/--skip-duplicates )
_collect_file_info "${file_check_json_upload_file}" "${slug_upload_file}" || return 1
@@ -386,9 +387,9 @@ _upload_file_main() {
retry_upload_file_main="${RETRY:-0}" && unset RETURN_STATUS
until [ "${retry_upload_file_main}" -le 0 ] && [ -n "${RETURN_STATUS}" ]; do
if [ -n "${4}" ]; then
- _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" "${ACCESS_TOKEN}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
else
- _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" "${ACCESS_TOKEN}" && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" && RETURN_STATUS=1 && break
fi
RETURN_STATUS=2 retry_upload_file_main="$((retry_upload_file_main - 1))" && continue
done
@@ -398,8 +399,8 @@ _upload_file_main() {
###################################################
# Upload all files in the given folder, parallelly or non-parallely and show progress
-# Globals: 2 variables, 3 functions
-# Variables - VERBOSE and VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
+# Globals: 7 variables, 4 functions
+# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
# Functions - _clear_line, _newline, _print_center and _upload_file_main
# Arguments: 4
# ${1} = parallel or normal
@@ -461,16 +462,15 @@ EOF
###################################################
# Copy/Clone a public gdrive file/folder from another/same gdrive account
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET
-# Functions - _check_existing_file, _json_value, _bytes_to_human, _clear_line
+# Globals: 6 variables, 5 functions
+# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN
+# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line
# Arguments: 5
# ${1} = update or upload ( upload type )
# ${2} = file id to upload
# ${3} = root dir id for file
-# ${4} = Access Token
-# ${5} = name of file
-# ${6} = size of file
+# ${4} = name of file
+# ${5} = size of file
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -478,8 +478,8 @@ EOF
# https://developers.google.com/drive/api/v2/reference/files/copy
###################################################
_clone_file() {
- [ $# -lt 4 ] && printf "Missing arguments\n" && return 1
- job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" token_clone_file="${4}" name_clone_file="${5}" size_clone_file="${6}"
+ [ $# -lt 5 ] && printf "Missing arguments\n" && return 1
+ job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" name_clone_file="${4}" size_clone_file="${5}"
unset post_data_clone_file response_clone_file readable_size_clone_file && STRING="Cloned"
post_data_clone_file="{\"parents\": [\"${file_root_id_clone_file}\"]}"
readable_size_clone_file="$(printf "%s\n" "${size_clone_file}" | _bytes_to_human)"
@@ -489,7 +489,7 @@ _clone_file() {
if [ "${job_clone_file}" = update ]; then
unset file_check_json_clone_file
# Check if file actually exists.
- if file_check_json_clone_file="$(_check_existing_file "${name_clone_file}" "${file_root_id_clone_file}" "${token_clone_file}")"; then
+ if file_check_json_clone_file="$(_check_existing_file "${name_clone_file}" "${file_root_id_clone_file}")"; then
if [ -n "${SKIP_DUPLICATES}" ]; then
_collect_file_info "${file_check_json_clone_file}" "${name_clone_file}" || return 1
_clear_line 1
@@ -497,12 +497,11 @@ _clone_file() {
else
_print_center "justify" "Overwriting file.." "-"
{ _file_id_clone_file="$(printf "%s\n" "${file_check_json_clone_file}" | _json_value id 1 1)" &&
- post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare" "${token_clone_file}")"; } ||
+ post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare")"; } ||
{ _error_logging_upload "${name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" && return 1; }
if [ "${_file_id_clone_file}" != "${file_id_clone_file}" ]; then
- curl --compressed -s \
+ _api_request -s \
-X DELETE \
- -H "Authorization: Bearer ${token_clone_file}" \
"${API_URL}/drive/${API_VERSION}/files/${_file_id_clone_file}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || :
STRING="Updated"
else
@@ -517,9 +516,8 @@ _clone_file() {
fi
# shellcheck disable=SC2086
- response_clone_file="$(curl --compressed ${CURL_PROGRESS} \
+ response_clone_file="$(_api_request ${CURL_PROGRESS} \
-X POST \
- -H "Authorization: Bearer ${token_clone_file}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${post_data_clone_file}" \
"${API_URL}/drive/${API_VERSION}/files/${file_id_clone_file}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)"
@@ -531,28 +529,26 @@ _clone_file() {
###################################################
# Share a gdrive file/folder
-# Globals: 2 variables, 4 functions
-# Variables - API_URL and API_VERSION
+# Globals: 3 variables, 4 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _clear_line
-# Arguments: 3
+# Arguments: 2
# ${1} = gdrive ID of folder/file
-# ${2} = Access Token
-# ${3} = Email to which file will be shared ( optional )
+# ${2} = Email to which file will be shared ( optional )
# Result: read description
# Reference:
# https://developers.google.com/drive/api/v3/manage-sharing
###################################################
_share_id() {
[ $# -lt 2 ] && printf "Missing arguments\n" && return 1
- id_share_id="${1}" token_share_id="${2}" share_email_share_id="${3}" role_share_id="reader" type_share_id="${share_email_share_id:+user}"
+ id_share_id="${1}" share_email_share_id="${2}" role_share_id="reader" type_share_id="${share_email_share_id:+user}"
unset post_data_share_id response_share_id
"${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2
post_data_share_id="{\"role\":\"${role_share_id}\",\"type\":\"${type_share_id:-anyone}\"${share_email_share_id:+,\\\"emailAddress\\\":\\\"${share_email_share_id}\\\"}}"
- response_share_id="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ response_share_id="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token_share_id}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${post_data_share_id}" \
"${API_URL}/drive/${API_VERSION}/files/${id_share_id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
diff --git a/sh/google-oauth2.sh b/sh/google-oauth2.sh
index 1eccf81..e259566 100755
--- a/sh/google-oauth2.sh
+++ b/sh/google-oauth2.sh
@@ -33,9 +33,10 @@ _get_token_and_update() {
RESPONSE="${1:-$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || :
ACCESS_TOKEN="$(printf "%s\n" "${RESPONSE}" | _json_value access_token 1 1)"
if [ -n "${ACCESS_TOKEN}" ]; then
- [ -n "${UPDATE}" ] && ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)"
- "${UPDATE:-:}" ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
- "${UPDATE:-:}" ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"
+ [ -n "${UPDATE}" ] && {
+ _update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
+ _update_config ACCESS_TOKEN_EXPIRY "$(($(date +"%s") + $(printf "%s\n" "${RESPONSE}" | _json_value expires_in 1 1) - 1))" "${CONFIG}"
+ }
else
_print_center "justify" "Error: Something went wrong" ", printing error." "=" 1>&2
printf "%s\n" "${RESPONSE}" 1>&2
@@ -46,7 +47,7 @@ _get_token_and_update() {
[ "${1}" = create ] || [ "${1}" = refresh ] || _short_help
-[ "${2}" = update ] && UPDATE="_update_config"
+{ [ "${2}" = update ] && UPDATE="true"; } || unset UPDATE
UTILS_FOLDER="${UTILS_FOLDER:-$(pwd)}"
{ . "${UTILS_FOLDER}"/common-utils.sh; } || { printf "Error: Unable to source util files.\n" && exit 1; }
@@ -69,7 +70,7 @@ CONFIG="${CONFIG:-${HOME}/.googledrive.conf}"
# shellcheck source=/dev/null
[ -f "${CONFIG}" ] && . "${CONFIG}"
-! _is_terminal && [ -z "${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}}" ] && {
+! [ -t 2 ] && [ -z "${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}}" ] && {
printf "%s\n" "Error: Script is not running in a terminal, cannot ask for credentials."
printf "%s\n" "Add in config manually if terminal is not accessible. CLIENT_ID, CLIENT_SECRET and REFRESH_TOKEN is required." && return 1
}
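The _is_terminal helper is replaced with the POSIX test [ -t 2 ], which succeeds only when file descriptor 2 (stderr) is attached to a terminal. A quick illustration:

    # prints "terminal" when stderr goes to a tty, "redirected" otherwise
    if [ -t 2 ]; then printf "terminal\n"; else printf "redirected\n"; fi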
diff --git a/sh/release/gupload b/sh/release/gupload
index 54f9662..1dedb5d 100755
--- a/sh/release/gupload
+++ b/sh/release/gupload
@@ -349,6 +349,18 @@ _url_encode() (
q = y ~ /[[:alnum:]]_.!~*\47()-]/ ? q y : q sprintf("%%%02X", z[y])
print q}' "${1}"
)
+# shellcheck source=/dev/null
+
+###################################################
+# A simple wrapper to check tempfile for access token and make authorized oauth requests to drive api
+###################################################
+_api_request() {
+ . "${TMPFILE}_ACCESS_TOKEN"
+
+ curl --compressed \
+ -H "Authorization: Bearer ${ACCESS_TOKEN}" \
+ "${@}"
+}
###################################################
# Method to regenerate access_token ( also updates in config ).
@@ -362,8 +374,7 @@ _get_access_token_and_update() {
RESPONSE="${1:-$(curl --compressed -s -X POST --data "client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&refresh_token=${REFRESH_TOKEN}&grant_type=refresh_token" "${TOKEN_URL}")}" || :
if ACCESS_TOKEN="$(printf "%s\n" "${RESPONSE}" | _json_value access_token 1 1)"; then
_update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
- { ACCESS_TOKEN_EXPIRY="$(curl --compressed -s "${API_URL}/oauth2/${API_VERSION}/tokeninfo?access_token=${ACCESS_TOKEN}" | _json_value exp 1 1)" &&
- _update_config ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"; } || { "${QUIET:-_print_center}" "normal" "Error: Couldn't update access token expiry." "-" 1>&2 && return 1; }
+ _update_config ACCESS_TOKEN_EXPIRY "$(($(date +"%s") + $(printf "%s\n" "${RESPONSE}" | _json_value expires_in 1 1) - 1))" "${CONFIG}"
else
"${QUIET:-_print_center}" "justify" "Error: Something went wrong" ", printing error." "=" 1>&2
printf "%s\n" "${RESPONSE}" 1>&2
@@ -424,13 +435,12 @@ _error_logging_upload() {
###################################################
# Get information for a gdrive folder/file.
-# Globals: 2 variables, 1 function
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 1 function
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = folder/file gdrive id
# ${2} = information to fetch, e.g name, id
-# ${3} = Access Token
# Result: On
# Success - print fetched value
# Error - print "message" field from the json
@@ -438,13 +448,12 @@ _error_logging_upload() {
# https://developers.google.com/drive/api/v3/search-files
###################################################
_drive_info() {
- [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
- folder_id_drive_info="${1}" fetch_drive_info="${2}" token_drive_info="${3}"
+ [ $# -lt 2 ] && printf "Missing arguments\n" && return 1
+ folder_id_drive_info="${1}" fetch_drive_info="${2}"
unset search_response_drive_info
"${EXTRA_LOG}" "justify" "Fetching info.." "-" 1>&2
- search_response_drive_info="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token_drive_info}" \
+ search_response_drive_info="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files/${folder_id_drive_info}?fields=${fetch_drive_info}&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -454,27 +463,25 @@ _drive_info() {
###################################################
# Search for an existing file on gdrive with write permission.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = file name
# ${2} = root dir id of file
-# ${3} = Access Token
# Result: print file id else blank
# Reference:
# https://developers.google.com/drive/api/v3/search-files
###################################################
_check_existing_file() (
- [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
- name_check_existing_file="${1##*/}" rootdir_check_existing_file="${2}" token_check_existing_file="${3}"
+ [ $# -lt 2 ] && printf "Missing arguments\n" && return 1
+ name_check_existing_file="${1##*/}" rootdir_check_existing_file="${2}"
unset query_check_existing_file response_check_existing_file id_check_existing_file
"${EXTRA_LOG}" "justify" "Checking if file" " exists on gdrive.." "-" 1>&2
query_check_existing_file="$(_url_encode "name='${name_check_existing_file}' and '${rootdir_check_existing_file}' in parents and trashed=false and 'me' in writers")"
- response_check_existing_file="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token_check_existing_file}" \
+ response_check_existing_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query_check_existing_file}&fields=files(id,name,mimeType)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
_clear_line 1 1>&2
@@ -484,35 +491,32 @@ _check_existing_file() (
###################################################
# Create/Check directory in google drive.
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION
+# Globals: 3 variables, 2 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value
-# Arguments: 3
+# Arguments: 2
# ${1} = dir name
# ${2} = root dir id of given dir
-# ${3} = Access Token
# Result: print folder id
# Reference:
# https://developers.google.com/drive/api/v3/folder
###################################################
_create_directory() (
- [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
- dirname_create_directory="${1##*/}" rootdir_create_directory="${2}" token_create_directory="${3}"
+ [ $# -lt 2 ] && printf "Missing arguments\n" && return 1
+ dirname_create_directory="${1##*/}" rootdir_create_directory="${2}"
unset query_create_directory search_response_create_directory folder_id_create_directory
"${EXTRA_LOG}" "justify" "Creating GDRIVE DIR:" " ${dirname_create_directory}" "-" 1>&2
query_create_directory="$(_url_encode "mimeType='application/vnd.google-apps.folder' and name='${dirname_create_directory}' and trashed=false and '${rootdir_create_directory}' in parents")"
- search_response_create_directory="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
- -H "Authorization: Bearer ${token_create_directory}" \
+ search_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \
"${API_URL}/drive/${API_VERSION}/files?q=${query_create_directory}&fields=files(id)&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
if ! folder_id_create_directory="$(printf "%s\n" "${search_response_create_directory}" | _json_value id 1 1)"; then
unset create_folder_post_data_create_directory create_folder_response_create_directory
create_folder_post_data_create_directory="{\"mimeType\": \"application/vnd.google-apps.folder\",\"name\": \"${dirname_create_directory}\",\"parents\": [\"${rootdir_create_directory}\"]}"
- create_folder_response_create_directory="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ create_folder_response_create_directory="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token_create_directory}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${create_folder_post_data_create_directory}" \
"${API_URL}/drive/${API_VERSION}/files?fields=id&supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
@@ -529,9 +533,8 @@ _create_directory() (
# generate resumable upload link
_generate_upload_link() {
"${EXTRA_LOG}" "justify" "Generating upload link.." "-" 1>&2
- uploadlink_upload_file="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ uploadlink_upload_file="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X "${request_method_upload_file}" \
- -H "Authorization: Bearer ${token_upload_file}" \
-H "Content-Type: application/json; charset=UTF-8" \
-H "X-Upload-Content-Type: ${mime_type_upload_file}" \
-H "X-Upload-Content-Length: ${inputsize_upload_file}" \
@@ -552,9 +555,8 @@ _generate_upload_link() {
_upload_file_from_uri() {
_print_center "justify" "Uploading.." "-"
# shellcheck disable=SC2086 # Because unnecessary to another check because ${CURL_PROGRESS} won't be anything problematic.
- upload_body_upload_file="$(curl --compressed ${CURL_PROGRESS} \
+ upload_body_upload_file="$(_api_request ${CURL_PROGRESS} \
-X PUT \
- -H "Authorization: Bearer ${token_upload_file}" \
-H "Content-Type: ${mime_type_upload_file}" \
-H "Content-Length: ${content_length_upload_file}" \
-H "Slug: ${slug_upload_file}" \
@@ -603,16 +605,15 @@ _full_upload() {
###################################################
# Upload ( Create/Update ) files on gdrive.
# Interrupted uploads can be resumed.
-# Globals: 7 variables, 10 functions
-# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID
+# Globals: 8 variables, 10 functions
+# Variables - API_URL, API_VERSION, QUIET, VERBOSE, VERBOSE_PROGRESS, CURL_PROGRESS, LOG_FILE_ID, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _bytes_to_human
# _generate_upload_link, _upload_file_from_uri, _log_upload_session, _remove_upload_session
# _full_upload, _collect_file_info
-# Arguments: 5
+# Arguments: 3
# ${1} = update or upload ( upload type )
# ${2} = file to upload
# ${3} = root dir id for file
-# ${4} = Access Token
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -622,8 +623,8 @@ _full_upload() {
# https://developers.google.com/drive/api/v3/reference/files/update
###################################################
_upload_file() {
- [ $# -lt 4 ] && printf "Missing arguments\n" && return 1
- job_upload_file="${1}" input_upload_file="${2}" folder_id_upload_file="${3}" token_upload_file="${4}"
+ [ $# -lt 3 ] && printf "Missing arguments\n" && return 1
+ job_upload_file="${1}" input_upload_file="${2}" folder_id_upload_file="${3}"
unset slug_upload_file inputname_upload_file extension_upload_file inputsize_upload_file readable_size_upload_file request_method_upload_file \
url_upload_file postdata_upload_file uploadlink_upload_file upload_body_upload_file mime_type_upload_file resume_args_upload_file
@@ -647,7 +648,7 @@ _upload_file() {
[ "${job_upload_file}" = update ] && {
unset file_check_json_upload_file
# Check if file actually exists, and create if not.
- if file_check_json_upload_file="$(_check_existing_file "${slug_upload_file}" "${folder_id_upload_file}" "${token_upload_file}")"; then
+ if file_check_json_upload_file="$(_check_existing_file "${slug_upload_file}" "${folder_id_upload_file}")"; then
if [ -n "${SKIP_DUPLICATES}" ]; then
# Stop upload if already exists ( -d/--skip-duplicates )
_collect_file_info "${file_check_json_upload_file}" "${slug_upload_file}" || return 1
@@ -736,9 +737,9 @@ _upload_file_main() {
retry_upload_file_main="${RETRY:-0}" && unset RETURN_STATUS
until [ "${retry_upload_file_main}" -le 0 ] && [ -n "${RETURN_STATUS}" ]; do
if [ -n "${4}" ]; then
- _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" "${ACCESS_TOKEN}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" 2>| /dev/null 1>&2 && RETURN_STATUS=1 && break
else
- _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" "${ACCESS_TOKEN}" && RETURN_STATUS=1 && break
+ _upload_file "${UPLOAD_MODE:-create}" "${file_upload_file_main}" "${dirid_upload_file_main}" && RETURN_STATUS=1 && break
fi
RETURN_STATUS=2 retry_upload_file_main="$((retry_upload_file_main - 1))" && continue
done
@@ -748,8 +749,8 @@ _upload_file_main() {
###################################################
# Upload all files in the given folder, parallelly or non-parallely and show progress
-# Globals: 2 variables, 3 functions
-# Variables - VERBOSE and VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
+# Globals: 7 variables, 4 functions
+# Variables - VERBOSE, VERBOSE_PROGRESS, NO_OF_PARALLEL_JOBS, NO_OF_FILES, TMPFILE, UTILS_FOLDER and QUIET
# Functions - _clear_line, _newline, _print_center and _upload_file_main
# Arguments: 4
# ${1} = parallel or normal
@@ -811,16 +812,15 @@ EOF
###################################################
# Copy/Clone a public gdrive file/folder from another/same gdrive account
-# Globals: 2 variables, 2 functions
-# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET
-# Functions - _check_existing_file, _json_value, _bytes_to_human, _clear_line
+# Globals: 6 variables, 5 functions
+# Variables - API_URL, API_VERSION, CURL_PROGRESS, LOG_FILE_ID, QUIET, ACCESS_TOKEN
+# Functions - _print_center, _check_existing_file, _json_value, _bytes_to_human, _clear_line
# Arguments: 5
# ${1} = update or upload ( upload type )
# ${2} = file id to upload
# ${3} = root dir id for file
-# ${4} = Access Token
-# ${5} = name of file
-# ${6} = size of file
+# ${4} = name of file
+# ${5} = size of file
# Result: On
# Success - Upload/Update file and export FILE_ID
# Error - return 1
@@ -828,8 +828,8 @@ EOF
# https://developers.google.com/drive/api/v2/reference/files/copy
###################################################
_clone_file() {
- [ $# -lt 4 ] && printf "Missing arguments\n" && return 1
- job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" token_clone_file="${4}" name_clone_file="${5}" size_clone_file="${6}"
+ [ $# -lt 5 ] && printf "Missing arguments\n" && return 1
+ job_clone_file="${1}" file_id_clone_file="${2}" file_root_id_clone_file="${3}" name_clone_file="${4}" size_clone_file="${5}"
unset post_data_clone_file response_clone_file readable_size_clone_file && STRING="Cloned"
post_data_clone_file="{\"parents\": [\"${file_root_id_clone_file}\"]}"
readable_size_clone_file="$(printf "%s\n" "${size_clone_file}" | _bytes_to_human)"
@@ -839,7 +839,7 @@ _clone_file() {
if [ "${job_clone_file}" = update ]; then
unset file_check_json_clone_file
# Check if file actually exists.
- if file_check_json_clone_file="$(_check_existing_file "${name_clone_file}" "${file_root_id_clone_file}" "${token_clone_file}")"; then
+ if file_check_json_clone_file="$(_check_existing_file "${name_clone_file}" "${file_root_id_clone_file}")"; then
if [ -n "${SKIP_DUPLICATES}" ]; then
_collect_file_info "${file_check_json_clone_file}" "${name_clone_file}" || return 1
_clear_line 1
@@ -847,12 +847,11 @@ _clone_file() {
else
_print_center "justify" "Overwriting file.." "-"
{ _file_id_clone_file="$(printf "%s\n" "${file_check_json_clone_file}" | _json_value id 1 1)" &&
- post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare" "${token_clone_file}")"; } ||
+ post_data_clone_file="$(_drive_info "${_file_id_clone_file}" "parents,writersCanShare")"; } ||
{ _error_logging_upload "${name_clone_file}" "${post_data_clone_file:-${file_check_json_clone_file}}" && return 1; }
if [ "${_file_id_clone_file}" != "${file_id_clone_file}" ]; then
- curl --compressed -s \
+ _api_request -s \
-X DELETE \
- -H "Authorization: Bearer ${token_clone_file}" \
"${API_URL}/drive/${API_VERSION}/files/${_file_id_clone_file}?supportsAllDrives=true&includeItemsFromAllDrives=true" 2>| /dev/null 1>&2 || :
STRING="Updated"
else
@@ -867,9 +866,8 @@ _clone_file() {
fi
# shellcheck disable=SC2086
- response_clone_file="$(curl --compressed ${CURL_PROGRESS} \
+ response_clone_file="$(_api_request ${CURL_PROGRESS} \
-X POST \
- -H "Authorization: Bearer ${token_clone_file}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${post_data_clone_file}" \
"${API_URL}/drive/${API_VERSION}/files/${file_id_clone_file}/copy?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)"
@@ -881,28 +879,26 @@ _clone_file() {
###################################################
# Share a gdrive file/folder
-# Globals: 2 variables, 4 functions
-# Variables - API_URL and API_VERSION
+# Globals: 3 variables, 4 functions
+# Variables - API_URL, API_VERSION, ACCESS_TOKEN
# Functions - _url_encode, _json_value, _print_center, _clear_line
-# Arguments: 3
+# Arguments: 2
# ${1} = gdrive ID of folder/file
-# ${2} = Access Token
-# ${3} = Email to which file will be shared ( optional )
+# ${2} = Email to which file will be shared ( optional )
# Result: read description
# Reference:
# https://developers.google.com/drive/api/v3/manage-sharing
###################################################
_share_id() {
[ $# -lt 2 ] && printf "Missing arguments\n" && return 1
- id_share_id="${1}" token_share_id="${2}" share_email_share_id="${3}" role_share_id="reader" type_share_id="${share_email_share_id:+user}"
+ id_share_id="${1}" share_email_share_id="${2}" role_share_id="reader" type_share_id="${share_email_share_id:+user}"
unset post_data_share_id response_share_id
"${EXTRA_LOG}" "justify" "Sharing.." "-" 1>&2
post_data_share_id="{\"role\":\"${role_share_id}\",\"type\":\"${type_share_id:-anyone}\"${share_email_share_id:+,\\\"emailAddress\\\":\\\"${share_email_share_id}\\\"}}"
- response_share_id="$(curl --compressed "${CURL_PROGRESS_EXTRA}" \
+ response_share_id="$(_api_request "${CURL_PROGRESS_EXTRA}" \
-X POST \
- -H "Authorization: Bearer ${token_share_id}" \
-H "Content-Type: application/json; charset=UTF-8" \
-d "${post_data_share_id}" \
"${API_URL}/drive/${API_VERSION}/files/${id_share_id}/permissions?supportsAllDrives=true&includeItemsFromAllDrives=true" || :)" && _clear_line 1 1>&2
@@ -1289,15 +1285,38 @@ _check_credentials() {
fi
}
- [ -z "${ACCESS_TOKEN}" ] || [ "${ACCESS_TOKEN_EXPIRY}" -lt "$(date +'%s')" ] && { _get_access_token_and_update || return 1; }
+ [ -z "${ACCESS_TOKEN}" ] || [ "${ACCESS_TOKEN_EXPIRY:-0}" -lt "$(date +'%s')" ] && { _get_access_token_and_update || return 1; }
+
+ # launch a background service to check the access token and update it when needed
+ # it checks ACCESS_TOKEN_EXPIRY and refreshes the token 5 minutes before expiry; a fresh token is valid for 60 minutes
+ # the process is killed when the script exits
+ {
+ while :; do
+ CURRENT_TIME="$(date +'%s')"
+ REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))"
+ if [ "${REMAINING_TOKEN_TIME}" -le 300 ]; then
+ # timeout after 30 seconds; the refresh shouldn't take that long anyway
+ _timeout 30 _get_access_token_and_update || :
+ else
+ TOKEN_PROCESS_TIME_TO_SLEEP="$(if [ "${REMAINING_TOKEN_TIME}" -le 301 ]; then
+ printf "0\n"
+ else
+ printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))"
+ fi)"
+ sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}"
+ fi
+ sleep 1
+ done
+ } &
+ ACCESS_TOKEN_SERVICE_PID="${!}"
return 0
}
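The background service tries to renew the token inside the last five minutes of its lifetime and otherwise sleeps until that window opens. A condensed sketch of the scheduling decision, with a hypothetical lifetime of 3600 seconds:

    # with the expiry 3600 seconds away the service sleeps ~3300 seconds,
    # then refreshes once it is inside the 5 minute window
    ACCESS_TOKEN_EXPIRY="$(($(date +'%s') + 3600))"
    REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - $(date +'%s')))"
    if [ "${REMAINING_TOKEN_TIME}" -le 300 ]; then
        printf "refresh now\n"
    else
        printf "sleep %s seconds before refreshing\n" "$((REMAINING_TOKEN_TIME - 300))"
    fi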
###################################################
# Setup root directory where all file/folders will be uploaded/updated
-# Globals: 6 variables, 5 functions
-# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET, ACCESS_TOKEN
+# Globals: 5 variables, 5 functions
+# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET
# Functions - _print, _drive_info, _extract_id, _update_config, _json_value
# Arguments: 1
# ${1}" = Positive integer ( amount of time in seconds to sleep )
@@ -1309,7 +1328,7 @@ _check_credentials() {
###################################################
_setup_root_dir() {
_check_root_id() {
- _setup_root_dir_json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id" "${ACCESS_TOKEN}")"
+ _setup_root_dir_json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")"
if ! rootid_setup_root_dir="$(printf "%s\n" "${_setup_root_dir_json}" | _json_value id 1 1)"; then
if printf "%s\n" "${_setup_root_dir_json}" | grep "File not found" -q; then
"${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." "=" 1>&2
@@ -1324,7 +1343,7 @@ _setup_root_dir() {
return 0
}
_check_root_id_name() {
- ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" "${ACCESS_TOKEN}" | _json_value name 1 1 || :)"
+ ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name 1 1 || :)"
"${1:-:}" ROOT_FOLDER_NAME "${ROOT_FOLDER_NAME}" "${CONFIG}"
return 0
}
@@ -1348,8 +1367,8 @@ _setup_root_dir() {
# Setup Workspace folder
# Check if the given folder exists in google drive.
# If not then the folder is created in google drive under the configured root folder.
-# Globals: 3 variables, 3 functions
-# Variables - FOLDERNAME, ROOT_FOLDER, ACCESS_TOKEN
+# Globals: 2 variables, 3 functions
+# Variables - FOLDERNAME, ROOT_FOLDER
# Functions - _create_directory, _drive_info, _json_value
# Arguments: None
# Result: Read Description
@@ -1359,9 +1378,9 @@ _setup_workspace() {
WORKSPACE_FOLDER_ID="${ROOT_FOLDER}"
WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}"
else
- WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}" "${ACCESS_TOKEN}")" ||
+ WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; }
- WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name "${ACCESS_TOKEN}" | _json_value name 1 1)" ||
+ WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; }
fi
return 0
@@ -1384,12 +1403,12 @@ _setup_workspace() {
###################################################
_process_arguments() {
export API_URL API_VERSION TOKEN_URL ACCESS_TOKEN \
- LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY SOURCE_UTILS UTILS_FOLDER \
+ LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY SOURCE_UTILS UTILS_FOLDER TMPFILE \
QUIET VERBOSE VERBOSE_PROGRESS CURL_PROGRESS CURL_PROGRESS_EXTRA CURL_PROGRESS_EXTRA_CLEAR COLUMNS EXTRA_LOG PARALLEL_UPLOAD
# on successful uploads
_share_and_print_link() {
- "${SHARE:-:}" "${1:-}" "${ACCESS_TOKEN}" "${SHARE_EMAIL}"
+ "${SHARE:-:}" "${1:-}" "${SHARE_EMAIL}"
[ -z "${HIDE_INFO}" ] && {
_print_center "justify" "DriveLink" "${SHARE:+ (SHARED)}" "-"
_support_ansi_escapes && [ "$((COLUMNS))" -gt 45 ] 2>| /dev/null && _print_center "normal" '^ ^ ^' ' '
@@ -1447,7 +1466,7 @@ _process_arguments() {
"${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n"
"${EXTRA_LOG}" "justify" "Creating folder.." "-"
- { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" && export ID; } ||
+ { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
_clear_line 1 && DIRIDS="${ID}"
@@ -1471,7 +1490,7 @@ _process_arguments() {
NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}"
NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2
- ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" ||
+ ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
# Store sub-folder directory IDs and it's path for later use.
@@ -1520,7 +1539,7 @@ EOF
esac; do
_print_center "justify" "Given Input" ": ID" "="
"${EXTRA_LOG}" "justify" "Checking if id exists.." "-"
- json="$(_drive_info "${gdrive_id}" "name,mimeType,size" "${ACCESS_TOKEN}")" || :
+ json="$(_drive_info "${gdrive_id}" "name,mimeType,size")" || :
if ! printf "%s\n" "${json}" | _json_value code 1 1 2>| /dev/null 1>&2; then
type="$(printf "%s\n" "${json}" | _json_value mimeType 1 1 || :)"
name="$(printf "%s\n" "${json}" | _json_value name 1 1 || :)"
@@ -1534,7 +1553,7 @@ EOF
*)
_print_center "justify" "Given Input" ": File ID" "="
_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
- _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${ACCESS_TOKEN}" "${name}" "${size}" ||
+ _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" ||
{ for _ in 1 2; do _clear_line 1; done && continue; }
;;
esac
@@ -1566,13 +1585,21 @@ main() {
_setup_arguments "${@}"
"${SKIP_INTERNET_CHECK:-_check_internet}"
- [ -n "${PARALLEL_UPLOAD}" ] && {
- { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="$(pwd)/$(date +'%s').LOG"
- }
+ { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="$(pwd)/$(date +'%s').LOG"
_cleanup() {
{
- [ -n "${PARALLEL_UPLOAD}" ] && rm -f "${TMPFILE:?}"*
+ rm -f "${TMPFILE:?}"*
+
+ # grab all child processes of the access token service
+ # https://askubuntu.com/a/512872
+ token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)"
+ # kill the parent process first, then its children
+ kill "${ACCESS_TOKEN_SERVICE_PID}"
+ for pid in ${token_service_pids}; do
+ kill "${pid}"
+ done
+
export abnormal_exit && if [ -n "${abnormal_exit}" ]; then
printf "\n\n%s\n" "Script exited manually."
kill -9 -$$ &
diff --git a/sh/upload.sh b/sh/upload.sh
index 5702aae..e933243 100755
--- a/sh/upload.sh
+++ b/sh/upload.sh
@@ -377,15 +377,38 @@ _check_credentials() {
fi
}
- [ -z "${ACCESS_TOKEN}" ] || [ "${ACCESS_TOKEN_EXPIRY}" -lt "$(date +'%s')" ] && { _get_access_token_and_update || return 1; }
+ [ -z "${ACCESS_TOKEN}" ] || [ "${ACCESS_TOKEN_EXPIRY:-0}" -lt "$(date +'%s')" ] && { _get_access_token_and_update || return 1; }
+
+ # launch a background service to check the access token and update it when needed
+ # it checks ACCESS_TOKEN_EXPIRY and refreshes the token 5 minutes before expiry; a fresh token is valid for 60 minutes
+ # the process is killed when the script exits
+ {
+ while :; do
+ CURRENT_TIME="$(date +'%s')"
+ REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))"
+ if [ "${REMAINING_TOKEN_TIME}" -le 300 ]; then
+ # timeout after 30 seconds; the refresh shouldn't take that long anyway
+ _timeout 30 _get_access_token_and_update || :
+ else
+ TOKEN_PROCESS_TIME_TO_SLEEP="$(if [ "${REMAINING_TOKEN_TIME}" -le 301 ]; then
+ printf "0\n"
+ else
+ printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))"
+ fi)"
+ sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}"
+ fi
+ sleep 1
+ done
+ } &
+ ACCESS_TOKEN_SERVICE_PID="${!}"
return 0
}
###################################################
# Setup root directory where all file/folders will be uploaded/updated
-# Globals: 6 variables, 5 functions
-# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET, ACCESS_TOKEN
+# Globals: 5 variables, 5 functions
+# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET
# Functions - _print, _drive_info, _extract_id, _update_config, _json_value
# Arguments: 1
# ${1}" = Positive integer ( amount of time in seconds to sleep )
@@ -397,7 +420,7 @@ _check_credentials() {
###################################################
_setup_root_dir() {
_check_root_id() {
- _setup_root_dir_json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id" "${ACCESS_TOKEN}")"
+ _setup_root_dir_json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")"
if ! rootid_setup_root_dir="$(printf "%s\n" "${_setup_root_dir_json}" | _json_value id 1 1)"; then
if printf "%s\n" "${_setup_root_dir_json}" | grep "File not found" -q; then
"${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." "=" 1>&2
@@ -412,7 +435,7 @@ _setup_root_dir() {
return 0
}
_check_root_id_name() {
- ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" "${ACCESS_TOKEN}" | _json_value name 1 1 || :)"
+ ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name 1 1 || :)"
"${1:-:}" ROOT_FOLDER_NAME "${ROOT_FOLDER_NAME}" "${CONFIG}"
return 0
}
@@ -436,8 +459,8 @@ _setup_root_dir() {
# Setup Workspace folder
# Check if the given folder exists in google drive.
# If not then the folder is created in google drive under the configured root folder.
-# Globals: 3 variables, 3 functions
-# Variables - FOLDERNAME, ROOT_FOLDER, ACCESS_TOKEN
+# Globals: 2 variables, 3 functions
+# Variables - FOLDERNAME, ROOT_FOLDER
# Functions - _create_directory, _drive_info, _json_value
# Arguments: None
# Result: Read Description
@@ -447,9 +470,9 @@ _setup_workspace() {
WORKSPACE_FOLDER_ID="${ROOT_FOLDER}"
WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}"
else
- WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}" "${ACCESS_TOKEN}")" ||
+ WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; }
- WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name "${ACCESS_TOKEN}" | _json_value name 1 1)" ||
+ WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" ||
{ printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; }
fi
return 0
@@ -472,12 +495,12 @@ _setup_workspace() {
###################################################
_process_arguments() {
export API_URL API_VERSION TOKEN_URL ACCESS_TOKEN \
- LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY SOURCE_UTILS UTILS_FOLDER \
+ LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY SOURCE_UTILS UTILS_FOLDER TMPFILE \
QUIET VERBOSE VERBOSE_PROGRESS CURL_PROGRESS CURL_PROGRESS_EXTRA CURL_PROGRESS_EXTRA_CLEAR COLUMNS EXTRA_LOG PARALLEL_UPLOAD
# on successful uploads
_share_and_print_link() {
- "${SHARE:-:}" "${1:-}" "${ACCESS_TOKEN}" "${SHARE_EMAIL}"
+ "${SHARE:-:}" "${1:-}" "${SHARE_EMAIL}"
[ -z "${HIDE_INFO}" ] && {
_print_center "justify" "DriveLink" "${SHARE:+ (SHARED)}" "-"
_support_ansi_escapes && [ "$((COLUMNS))" -gt 45 ] 2>| /dev/null && _print_center "normal" '^ ^ ^' ' '
@@ -535,7 +558,7 @@ _process_arguments() {
"${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n"
"${EXTRA_LOG}" "justify" "Creating folder.." "-"
- { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" && export ID; } ||
+ { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
_clear_line 1 && DIRIDS="${ID}"
@@ -559,7 +582,7 @@ _process_arguments() {
NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}"
NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2
- ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}" "${ACCESS_TOKEN}")" ||
+ ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" ||
{ "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
# Store sub-folder directory IDs and it's path for later use.
@@ -608,7 +631,7 @@ EOF
esac; do
_print_center "justify" "Given Input" ": ID" "="
"${EXTRA_LOG}" "justify" "Checking if id exists.." "-"
- json="$(_drive_info "${gdrive_id}" "name,mimeType,size" "${ACCESS_TOKEN}")" || :
+ json="$(_drive_info "${gdrive_id}" "name,mimeType,size")" || :
if ! printf "%s\n" "${json}" | _json_value code 1 1 2>| /dev/null 1>&2; then
type="$(printf "%s\n" "${json}" | _json_value mimeType 1 1 || :)"
name="$(printf "%s\n" "${json}" | _json_value name 1 1 || :)"
@@ -622,7 +645,7 @@ EOF
*)
_print_center "justify" "Given Input" ": File ID" "="
_print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
- _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${ACCESS_TOKEN}" "${name}" "${size}" ||
+ _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" ||
{ for _ in 1 2; do _clear_line 1; done && continue; }
;;
esac
@@ -654,13 +677,21 @@ main() {
_setup_arguments "${@}"
"${SKIP_INTERNET_CHECK:-_check_internet}"
- [ -n "${PARALLEL_UPLOAD}" ] && {
- { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="$(pwd)/$(date +'%s').LOG"
- }
+ { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="$(pwd)/$(date +'%s').LOG"
_cleanup() {
{
- [ -n "${PARALLEL_UPLOAD}" ] && rm -f "${TMPFILE:?}"*
+ rm -f "${TMPFILE:?}"*
+
+ # grab all child processes of the access token service
+ # https://askubuntu.com/a/512872
+ token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)"
+ # kill the parent process first, then its children
+ kill "${ACCESS_TOKEN_SERVICE_PID}"
+ for pid in ${token_service_pids}; do
+ kill "${pid}"
+ done
+
export abnormal_exit && if [ -n "${abnormal_exit}" ]; then
printf "\n\n%s\n" "Script exited manually."
kill -9 -$$ &