From 0381ca4616371ad68e7d0ccf083839957ff67792 Mon Sep 17 00:00:00 2001 From: Ulf 'Tiggi' Tigerstedt Date: Thu, 30 Jun 2022 09:44:47 +0300 Subject: [PATCH] Fix various typos and line endings --- a-access | 124 ++++++++++++++---------------------- a-check | 152 ++++++++++++++++++++++---------------------- a-delete | 103 ++++++++++++------------------ a-encrypt | 76 ++++++++-------------- a-find | 68 ++++++++++---------- a-flip | 64 +++++++++---------- a-put | 2 +- a-stream | 31 +-------- allas-dir-to-bucket | 1 + allas_conf | 5 +- 10 files changed, 263 insertions(+), 363 deletions(-) diff --git a/a-access b/a-access index 7b5cdc4..d5cb9f6 100755 --- a/a-access +++ b/a-access @@ -1,36 +1,36 @@ -#!/bin/bash +#!/bin/bash #function to check that swift works check_swift_connection () { test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) #test=$(swift stat 2> /dev/null | grep -c "Account:") - + if [[ $test -lt 1 ]] - then + then #if [ -n "$ACTIVE_TOKEN" ]; then # unset OS_AUTH_TOKEN # export OS_AUTH_TOKEN=$(check_atoken) # #echo "New OS_AUTH_TOKEN = $OS_AUTH_TOKEN" #fi - - if [[ -n "$OS_PASSWORD" ]]; then - if [[ $silent -eq 0 ]] ; then + + if [[ -n "$OS_PASSWORD" ]]; then + if [[ $silent -eq 0 ]] ; then echo "Updating token" fi - source $allas_conf_path --user $user -k $OS_PROJECT_NAME -f + source $allas_conf_path --user $user -k $OS_PROJECT_NAME -f fi test=$(swift stat 2> /dev/null | grep -c "Account:") if [[ $test -lt 1 ]] - then + then echo "No connection to Allas!" echo "Please try setting up the connection again." 
exit 1 else echo "swift connection updated" fi - else - echo "swift connection OK" - fi + else + echo "swift connection OK" + fi } #Function to remove the trailing / if it exist @@ -72,44 +72,44 @@ mode="swift" #Process command line while [[ $# -ge 1 ]] do - case "$1" in + case "$1" in '--bucket' | '-b' ) - bucket=($2) + bucket=($2) + shift shift - shift ;; '+r' | '+read' ) add_read_project="$2" shift - shift + shift ;; '+w' | '+write' ) add_write_project="$2" - + + shift shift - shift ;; '+rw' |'+wr' | '+read-write' ) add_read_project="$2" add_write_project="$2" shift - shift + shift ;; '-r' | '-read' ) remove_read_project="$2" shift - shift + shift ;; '-w' | '-write' ) remove_write_project="$2" shift - shift + shift ;; '-rw' |'-wr' | '-read-write' ) remove_read_project="$2" remove_write_project="$2" shift - shift + shift ;; '+p' | '+public' ) public=1 @@ -134,20 +134,20 @@ do ;; esac done - + if [ $print_help -eq 1 ]; then cat < /dev/null | wc -l ) if [[ $bnrows -eq 0 ]]; then echo "Bucket $bucket was not found in your current Allas project!" 
- exit 1 -fi + exit 1 +fi @@ -213,26 +213,26 @@ project_label=$(echo ${os_project_name} | sed -e s/"project_"/""/g) # check connection -check_swift_connection +check_swift_connection ## Make sure that user project has read and write permissions if [[ $add_read_project != "" || $add_write_project != "" ]]; then user_read=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}'| grep -c "${OS_PROJECT_NAME}") if [[ $user_read -lt 1 ]];then - read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/"${OS_PROJECT_NAME}:\*"/""/g ) + read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/"${OS_PROJECT_NAME}:\*"/""/g ) read_acl="${OS_PROJECT_NAME}:*,$read_acl" swift post "$bucket" -r "${read_acl}" bnrows=$(swift stat "${bucket}_segments" | wc -l 2> /dev/null ) - if [[ $bnrows -eq 0 ]]; then + if [[ $bnrows -eq 0 ]]; then echo "Creating bucket: ${bucket}_segments in case over 5 GB files will be uploaded to the bucket." rclone mkdir "${storage_server}:${bucket}_segments" - fi + fi swift post "${bucket}_segments" -r "${read_acl}" 2> /dev/null fi user_write=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}'| grep -c "${OS_PROJECT_NAME}") if [[ $user_write -lt 1 ]]; then - write_acl=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | sed -e s/"${OS_PROJECT_NAME}:\*"/""/g ) + write_acl=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | sed -e s/"${OS_PROJECT_NAME}:\*"/""/g ) write_acl="${OS_PROJECT_NAME}:*,$write_acl" echo "swift post $bucket -w ${write_acl}" swift post "$bucket" -w "${write_acl}" @@ -240,44 +240,44 @@ if [[ $add_read_project != "" || $add_write_project != "" ]]; then if [[ $bnrows -eq 0 ]]; then echo "Creating bucket: ${bucket}_segments in case over 5 GB files will be uploaded to the bucket." 
rclone mkdir "${storage_server}:${bucket}_segments" - fi + fi swift post "${bucket}_segments" -w "${write_acl}" 2> /dev/null fi fi ### Add new settings if [[ $add_read_project != "" ]] ; then - read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/"${add_read_project}:\*"/""/g ) + read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/"${add_read_project}:\*"/""/g ) read_acl="${add_read_project}:*,$read_acl" swift post "$bucket" -r "${read_acl}" bnrows=$(swift stat "${bucket}_segments" | wc -l 2> /dev/null ) - if [[ $bnrows -eq 0 ]]; then + if [[ $bnrows -eq 0 ]]; then echo "Creating bucket: ${bucket}_segments in case over 5 GB files will be uploaded to the bucket." rclone mkdir "${storage_server}:${bucket}_segments" - fi + fi swift post "${bucket}_segments" -r "${read_acl}" 2> /dev/null fi if [[ $add_write_project != "" ]] ; then - write_acl=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | sed -e s/"${add_read_project}:\*"/""/g ) + write_acl=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | sed -e s/"${add_read_project}:\*"/""/g ) write_acl="${add_write_project}:*,$write_acl" swift post "$bucket" -w "${write_acl}" bnrows=$(swift stat "${bucket}_segments" | wc -l 2> /dev/null ) if [[ $bnrows -eq 0 ]]; then echo "Creating bucket: ${bucket}_segments in case over 5 GB files will be uploaded to the bucket." 
rclone mkdir "${storage_server}:${bucket}_segments" - fi + fi swift post "${bucket}_segments" -w "${write_acl}" 2> /dev/null fi if [[ $remove_read_project != "" ]] ; then - read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/"${remove_read_project}:\*"/""/g ) + read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/"${remove_read_project}:\*"/""/g ) swift post "$bucket" -r "${read_acl}" swift post "${bucket}_segments" -r "${read_acl}" 2> /dev/null fi if [[ $remove_write_project != "" ]] ; then - write_acl=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | sed -e s/"${remove_write_project}:\*"/""/g ) + write_acl=$(swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | sed -e s/"${remove_write_project}:\*"/""/g ) swift post "$bucket" -w "${write_acl}" swift post "${bucket}_segments" -w "${write_acl}" 2> /dev/null fi @@ -291,52 +291,26 @@ if [[ $public -gt 0 ]];then fi swift post "$bucket" -r "${read_acl}" bnrows=$(swift stat "${bucket}_segments" | wc -l 2> /dev/null ) - if [[ $bnrows -eq 0 ]]; then + if [[ $bnrows -eq 0 ]]; then echo "Creating bucket: ${bucket}_segments in case over 5 GB files will be uploaded to the bucket." 
rclone mkdir "${storage_server}:${bucket}_segments" - fi + fi swift post "${bucket}_segments" -r "${read_acl}" 2> /dev/null fi if [[ $public -lt 0 ]];then read_acl=$(swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/".rlistings"/""/g| sed -e s/".r:\*"/""/g ) echo $read_acl - swift post "$bucket" -r "${read_acl}" + swift post "$bucket" -r "${read_acl}" swift post "${bucket}_segments" -r "${read_acl}" 2> /dev/null fi echo "----------------------------------------------------------" -echo "Projects that have read access to bucket $bucket:" +echo "Projects that have read access to bucket $bucket:" swift stat $bucket | awk '{ if ($1=="Read") if ($2=="ACL:") print $3}' | sed -e s/".r:\*"/"Public access"/g | sed -e s/".rlistings,"/""/g | sed -e s/".rlistings"/""/g | tr "," "\n" | tr -d ":,*" | awk '{ print " "$0}' echo "----------------------------------------------------------" -echo "Projects that have write access to bucket $bucket:" +echo "Projects that have write access to bucket $bucket:" swift stat $bucket | awk '{ if ($1=="Write") if ($2=="ACL:") print $3}' | tr "," "\n" | tr -d ":,*" | awk '{ print " "$0}' echo "----------------------------------------------------------" exit - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/a-check b/a-check index e7325cf..426cea3 100755 --- a/a-check +++ b/a-check @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash #function to check that swift works check_swift_connection () { @@ -11,15 +11,15 @@ check_swift_connection () { fi test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) - + if [[ $test -lt 1 ]] - then + then echo "No connection to Allas!" echo "Please try setting up the connection again." 
exit 1 - else - echo "swift connection OK" - fi + else + echo "swift connection OK" + fi } #Function to remove the trailing / if it exist @@ -117,7 +117,7 @@ max_files=100000 if [[ -e $HOME/.a_tools_conf ]]; then customized=1 source $HOME/.a_tools_conf -else +else customized=0 fi @@ -145,7 +145,7 @@ do tmp_file="$2" slashcheck=$(echo $tmp_file | grep -c "/") if [[ $slashcheck -gt 0 ]]; then - echo "Slash characters (/) are not allowed when object name is defiend with -o option" + echo "Slash characters (/) are not allowed when object name is defiend with -o option" echo "If you want to use slash characters to define a pseudo folder path, add that part of" echo "object name to the bucket definition (-b):" echo @@ -163,7 +163,7 @@ do '--compress' | '-c') compression=1 shift - ;; + ;; '--nc' | '-n' ) compression=0 free_space_check=0 @@ -184,7 +184,7 @@ do ;; '--tmpdir' | '-t' ) tmp_root=("$2") - tmp_dir=("${tmp_root}/a_put_$$_tmp") + tmp_dir=("${tmp_root}/a_put_$$_tmp") shift shift ;; @@ -203,15 +203,15 @@ do '--input-list') list_file=$2 if [[ -e $list_file ]];then - input_def=("$(cat $list_file)") - else + input_def=("$(cat $list_file)") + else echo "Import file list $list_file not found" - exit 1 - fi + exit 1 + fi shift shift ;; - '--asis' | '-a' ) + '--asis' | '-a' ) compression=0 free_space_check=0 asis_mode=1 @@ -222,16 +222,16 @@ do meta_message=$2 shift shift - ;; - + ;; + '--no-ameta') include_ameta=0 shift - ;; + ;; '--follow-links' ) tar_extra_options="-h" shift - ;; + ;; '-e' | '--encrypt' ) if [[ $2 == "c4gh" || $2 == "crypt4gh" ]];then if [[ $(which crypt4gh 2> /dev/null | wc -l ) -ne 1 ]];then @@ -241,7 +241,7 @@ do exit 1 fi encrypt="crypt4gh" - fi + fi if [[ $2 == "gpg" ]];then if [[ $(which gpg 2> /dev/null | wc -l ) -ne 1 ]];then echo "" @@ -251,20 +251,20 @@ do fi encrypt="gpg" fi - shift + shift shift ;; '--pk' | '--public-key' ) # query file public_key=$(abspath "$2") - if [[ -e $public_key ]];then + if [[ -e $public_key ]];then echo Public key: 
"$public_key" all_keys=$(echo -en "$all_keys\t--recipient_pk\t$public_key\t") echo $all_keys else echo "Public key $public_key not found" - exit 1 - fi + exit 1 + fi shift shift ;; @@ -280,7 +280,7 @@ do compression=0 sdx=1 asis_mode=1 - fnum=0 + fnum=0 shift ;; @@ -305,7 +305,7 @@ cat < Define a name of the bucket into +-b, --bucket Define a name of the bucket into which the data is uploaded. --p, --project Upload data into buckets of the defined - project in stead of the currently +-p, --project Upload data into buckets of the defined + project in stead of the currently configured project. --o, --object Define a name for the new object to be +-o, --object Define a name for the new object to be created. --S, --s3cmd Use S3 protocol in stead of swift protocol +-S, --s3cmd Use S3 protocol in stead of swift protocol for upoload. -n, --nc Do not compress the data that will be uploaded. - (This is now the default mode thus this option is + (This is now the default mode thus this option is no longer needed). -c, --compress The data is compressed using zstdmt command before - upload. - + upload. + -h, --help Print this help. --t, --tmpdir Define a direcrory that will be used to store +-t, --tmpdir Define a direcrory that will be used to store temporary files of the upload process. -s, --silent Less output @@ -352,29 +352,29 @@ a-check command line options: -u, --user Define username liked to the data to be uploaded (default: current username) ---skip-filelist Do not collect information about the files that +--skip-filelist Do not collect information about the files that the object contains to the metadata file. - Using this option speeds up the upload process - significantly if the directory to be uploaded - contains large amount of files. However, a-find + Using this option speeds up the upload process + significantly if the directory to be uploaded + contains large amount of files. However, a-find can't be used to locate objects uploaded this way. 
---no-ameta Don't create metadata objects ( _ameta ) for the +--no-ameta Don't create metadata objects ( _ameta ) for the stored data objects. -m, --message "your notes" Add a one line text note to the metadata object. --override Allow overwriting existing objects. ---input-list Give a file that lists the files or directtories +--input-list Give a file that lists the files or directtories to be uploaded to Allas. Each item will be stored as one object. -a, --asis Copy the given file or content of a directory to Allas - without compression and packing so that each file in the + without compression and packing so that each file in the directory will be copied to Allas as an individual object. - The object name contrains the relative path of the file to - be copied. + The object name contrains the relative path of the file to + be copied. --follow-links When uploading a directory, include linked files as real files in sead of links. @@ -384,9 +384,9 @@ a-check command line options: --pk, --public-key Public key used for crypt4gh encryption. --sdx Upload data to Allas in format format that is compatible with - the CSC Sensitive data services: The files are encrypted with - crypt4gh using CSC public key after which the files are inported - to Allas as individual objects as in --asis format. + the CSC Sensitive data services: The files are encrypted with + crypt4gh using CSC public key after which the files are inported + to Allas as individual objects as in --asis format. With --public-key you can do the encryption with both CSC and your own public key. 
By default data is stored to bucket with name: your-projecyt-number_SD-CONNECT, @@ -395,18 +395,18 @@ a-check command line options: Related commands: a-find, a-get, a-delete, a-info EOF -exit +exit -fi +fi -# note about customization +# note about customization if [[ $silent -eq 0 ]]; then if [[ $customized -eq 1 ]]; then echo "Customer settings red from $HOME/.a_tools_conf" fi -fi +fi -##Assign project to be used if not defined +##Assign project to be used if not defined #if [[ $os_project_name == "" ]] #then # if [ -e $HOME/.allas_default ] @@ -441,7 +441,7 @@ if [[ $(which rclone 2> /dev/null | wc -l ) -ne 1 ]];then fi # s3cmd mode -if [[ $mode == "s3cmd" ]]; then +if [[ $mode == "s3cmd" ]]; then storage_server="s3allas" fi @@ -469,7 +469,7 @@ if [[ $asis_mode -eq 1 ]];then exit 1 fi #For field separator changed to allow spaces in file names - #IFS contains the default field separator + #IFS contains the default field separator SAVEIFS="$IFS" #input_def=($(echo $input_def)) IFS=$(echo -en "\t\n\b") @@ -482,8 +482,8 @@ if [[ $asis_mode -eq 1 ]];then echo "Input definiton: $one_input_def" echo "dot't return any files to input" exit 1 - fi -fi + fi +fi mkdir $tmp_dir printf "%18s %25s %6s %8s %25s\n" "Date" "Name" "Files" "Size(kB)" "Location in Allas" >> ${tmp_dir}/upload.log @@ -494,9 +494,9 @@ do ##Check if connection works and update if needed and possible # if [[ $mode == "swift" ]] #then - # if [[ $silent -eq 0 ]] ; then - # check_swift_connection - # else + # if [[ $silent -eq 0 ]] ; then + # check_swift_connection + # else # check_swift_connection > /dev/null # fi #fi @@ -508,7 +508,7 @@ do if [[ ! -e $input ]] ; then echo "File or directory $input does not exist!" 
exit 1 - fi + fi #Remove the trailing / if it exist if [ $(echo -n $input | tail -c 1) == "/" ] @@ -516,7 +516,7 @@ do sl=$(expr ${#input} - 1) input=$(echo $input | cut -c 1-$sl) fi - + #check that file name does not end with _ameta if [[ ${input:(-6):6} == "_ameta" ]]; then echo "Found a file/directory name which ends with _ameta" @@ -541,7 +541,7 @@ do then if [[ $compression -eq 1 ]]; then tmp_file=($(basename "$input" | tr " " "_" )".zst") - else + else tmp_file=($(basename "$input" | tr " " "_" )) fi else @@ -558,22 +558,22 @@ do if [[ $compression -eq 1 ]]; then if [[ ${tmp_file: -4} != ".zst" ]]; then tmp_file="${tmp_file}.zst" - fi + fi fi else if [[ $compression -eq 1 ]]; then - if [[ ${tmp_file: -8} != ".tar.zst" ]]; then + if [[ ${tmp_file: -8} != ".tar.zst" ]]; then tmp_file="${tmp_file}.tar.zst" fi else - if [[ ${tmp_file: -4} != ".tar" ]]; then + if [[ ${tmp_file: -4} != ".tar" ]]; then tmp_file="${tmp_file}.tar" fi fi tmp_file=$(remove_slash_from_ends $tmp_file) fi fi - + # encryption name includes gpg if [[ $encrypt == "gpg" ]];then tmp_file="${tmp_file}.gpg" @@ -596,7 +596,7 @@ dmku3fKA/wrOpWntUTkkoQvknjZDisdmSwU4oFk/on0= -----END CRYPT4GH PUBLIC KEY-----" > .sdx_key_tmp_$$ fi fi - + #In case of asis-upload, partial_path is the relative path if [[ $asis_mode -eq 1 ]]; then @@ -626,7 +626,7 @@ dmku3fKA/wrOpWntUTkkoQvknjZDisdmSwU4oFk/on0= if [[ $bucket_name == "not_defined" ]]; then #default bucket_name=("${user}-${project_label}-MISC") - + ## Puhti and Mahti # In Puhti and Mahti we check if puhti-project and Allas project match #Puhti scratch @@ -689,7 +689,7 @@ dmku3fKA/wrOpWntUTkkoQvknjZDisdmSwU4oFk/on0= fi partial_path=$(dirname $file_path | sed -e s/"\/projappl\/$puhti_project"/""/g) fi - + #Puhti FMI-projappl if [ $(echo $file_path | cut -c1-13) == "/fmi/projappl" ] then @@ -731,17 +731,17 @@ dmku3fKA/wrOpWntUTkkoQvknjZDisdmSwU4oFk/on0= partial_path=$(dirname $file_path | sed -e s/"\/run\/nvme\/job_$SLURM_JOB_ID\/data"/""/g) fi 
partial_path=$(remove_slash_from_ends $partial_path) - fi - - #the name of the object to be created + fi + + #the name of the object to be created if [[ $partial_path == "" ]]; then target_location="${bucket_name}/${tmp_file}" else target_location="${bucket_name}/${partial_path}/$tmp_file" fi - - #Check if the object already exists + + #Check if the object already exists is_uploaded=0 if [[ $mode == "swift" ]];then if [[ $partial_path == "" ]]; then diff --git a/a-delete b/a-delete index c457916..9490e17 100755 --- a/a-delete +++ b/a-delete @@ -1,31 +1,31 @@ -#!/bin/bash +#!/bin/bash #function to check that swift works check_swift_connection () { test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) #test=$(swift stat 2> /dev/null | grep -c "Account:") - + if [[ $test -lt 1 ]] - then - if [[ -n "$OS_PASSWORD" ]]; then - if [[ $silent -eq 0 ]] ; then + then + if [[ -n "$OS_PASSWORD" ]]; then + if [[ $silent -eq 0 ]] ; then echo "Updating token" fi - source $allas_conf_path --user $user -k $OS_PROJECT_NAME --mode $mode -f + source $allas_conf_path --user $user -k $OS_PROJECT_NAME --mode $mode -f fi #test=$(swift stat 2> /dev/null | grep -c "Account:") - test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) + test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) if [[ $test -lt 1 ]] - then + then echo "No connection to Allas!" echo "Please try setting up the connection again." exit 1 else echo "swift connection updated" fi - else - echo "swift connection OK" - fi + else + echo "swift connection OK" + fi } @@ -136,15 +136,15 @@ The basic syntax of the command is: Options: --p, --project Delete objects form the buckets of the defined project in stead of the currently configured project. +-p, --project Delete objects form the buckets of the defined project in stead of the currently configured project. -b --bucket Object name includes bucket name and the command does not try to use the default bucket names. 
-u, --user Option allows you to assign a user account that is used to confirm the object ownership. -f, --force Don't ask confirmation when deleting a file - ---rmb Remove empty bucket. + +--rmb Remove empty bucket. -F, --FORCE In conjunction with --rmb, this option removes a non-empty bucket. @@ -152,7 +152,7 @@ Options: Related commands: a-put, a-get, a-find, a-info EOF exit -fi +fi # if [[ $defined_user != "x" ]]; then @@ -184,7 +184,7 @@ if [[ $remove_bucket -eq 1 ]]; then if [[ $force -eq 1 ]]; then if [[ $object_name != "" ]]; then rclone rmdir ${storage_server}:"$object_name" - #swift delete "$object_name" + #swift delete "$object_name" fi fi if [[ $force -eq 0 ]]; then @@ -192,9 +192,9 @@ if [[ $remove_bucket -eq 1 ]]; then echo "Are you sure want to remove bucket:" echo $object_name echo "[y/n]" - read ansver - if [[ $ansver == "y" ]] || [[ $ansver == "yes" ]]; then - rclone rmdir ${storage_server}:"$object_name" + read answer + if [[ $answer == "y" ]] || [[ $answer == "yes" ]]; then + rclone rmdir ${storage_server}:"$object_name" #swift delete ${object_name} echo "${object_name} was deleted." 
else @@ -215,10 +215,10 @@ if [ $check_os -ne 1 ]; then # echo "Did you mean this object:" # echo " $objects" #fi - #if [ ${#objects[@]} -gt 1 ]; then + #if [ ${#objects[@]} -gt 1 ]; then # echo "Did you mean some of these objects:" # for on in ${objects[@]} - # do + # do # echo " $on" # done #fi @@ -227,7 +227,7 @@ if [ $check_os -ne 1 ]; then echo " a-find $object_name" fi exit 1 -else +else object_with_bucket=(1) fi @@ -236,21 +236,21 @@ fi # check if object name contains bucket (if not defined with bucket) if [[ $object_with_bucket == 0 ]]; then if [ $(echo $object_name | grep -c "${user}-${project_label}-MISC" ) -eq 1 ] - then - object_with_bucket=(1) + then + object_with_bucket=(1) bucket=("${user}-${project_label}-MISC") fi if [ $(echo $object_name | grep -c "${user}-${project_label}-HOME" ) -eq 1 ] - then - object_with_bucket=(1) + then + object_with_bucket=(1) bucket=("${user}-${project_label}-HOME") fi if [ $(echo $object_name | grep -c "${user}-${project_label}-SCRATCH" ) -eq 1 ] - then - object_with_bucket=(1) + then + object_with_bucket=(1) bucket=("${user}-${project_label}-HOME") fi -fi +fi # check all buckets if the bucket is not deifned if [ $object_with_bucket == 0 ] @@ -260,7 +260,7 @@ then # go through the buckets for bn in $buckets - do + do bucket_found=$(rclone ls ${storage_server}:$bn/$object_name 2> /dev/null | wc -l) if [ $bucket_found -eq 1 ]; then echo "Bucket: $bn contains object:$object_name" @@ -268,7 +268,7 @@ then bucket=("$bn") fi done - + if [[ $num_buckets -eq 0 ]]; then echo "" echo "Could not find object: $object_name " @@ -277,10 +277,10 @@ then if [[ $num_buckets -gt 1 ]]; then echo "" echo "Object $object_name was found in several buckets!" 
- echo "Please ionclude bucket name in the object name" + echo "Please include bucket name in the object name" exit 1 fi - + #add bucket to the object name object_name=("$bucket/$object_name") else @@ -293,8 +293,8 @@ else echo "You can list all the available objects with command:" echo " a-find" fi -fi - +fi + ameta_found=$(rclone ls ${storage_server}:/"${object_name}_ameta" 2> /dev/null | wc -l) if [ $ameta_found -eq 1 ]; then object_owner=$(rclone cat ${storage_server}:/"${object_name}_ameta" |grep "^user:" | awk '{print $2}') @@ -302,10 +302,10 @@ if [ $ameta_found -eq 1 ]; then if [[ $user != $object_owner ]]; then echo "" echo "Your username ($user) is not matching to the username assigned to the object ($object_owner)" - echo "You can use option -user to enforce the deteltion tool to use aspecific user name" + echo "You can use option -user to enforce the deletion tool to use a specific user name" echo "${object_name} was not deleted" exit 1 - fi + fi fi #remove object @@ -319,8 +319,8 @@ else echo "Are you sure want to remove object:" echo $object_name echo "[y/n]" - read ansver - if [[ $ansver == "y" ]] || [[ $ansver == "yes" ]]; then + read answer + if [[ $answer == "y" ]] || [[ $answer == "yes" ]]; then rclone deletefile ${storage_server}:/"${object_name}" if [ $ameta_found -eq 1 ]; then rclone deletefile ${storage_server}:/"${object_name}_ameta" @@ -333,28 +333,3 @@ fi exit - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/a-encrypt b/a-encrypt index 210be94..8bb897a 100755 --- a/a-encrypt +++ b/a-encrypt @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash abspath() { old=`pwd`;new=$(dirname "$1");if [ "$new" != "." 
]; then cd $new; fi;file=`pwd`/$(basename "$1");cd $old;echo $file; } @@ -14,7 +14,7 @@ source $inst_root/a_env_conf # local variable object_name="" #object to retrieve print_help=0 -os_project_name="$OS_PROJECT_NAME" +os_project_name="$OS_PROJECT_NAME" mode="swift" tmp_dir="${tmp_root}/a_get_$$_tmp" show_filelist=0 @@ -76,14 +76,14 @@ do '-p' | '--pk' | '--public-key' ) # query file public_key=$(abspath "$2") - if [[ -e $public_key ]];then + if [[ -e $public_key ]];then echo Public key: "$public_key" all_keys=$(echo -en "$all_keys --recipient_pk $public_key ") echo $all_keys else echo "Public key $public_key not found" - exit 1 - fi + exit 1 + fi shift shift ;; @@ -113,11 +113,11 @@ The basic syntax of the command is: a-encrypt object_name a-encryp command streams the object to the local computer where crypt4gh encryption is applied -to the data stream. The encrypted data is then streamed back to Allas into a new object. +to the data stream. The encrypted data is then streamed back to Allas into a new object. By default the object is encrypted with CSC public key only. The encrypted object is located to the same bucket as the original object. Suffix: .c4gh is added to the object name. -The main purpose of this tool is to make a file, uploaded to the Allas service, compatible with the +The main purpose of this tool is to make a file, uploaded to the Allas service, compatible with the Sensitive data services of CSC. Options: @@ -130,12 +130,12 @@ Options: This option allows you to include additional public keys so that data can be used outside CSC sensitive data computing environment too. ---s3cmd Use S3 protocol and s3cmd command for data retrieval in stead of +--s3cmd Use S3 protocol and s3cmd command for data retrieval in stead of Swift protocol and rclone. 
-s, --suffix Define your own suffix in stead of the default suffix (.c4gh) --a, --all Process all the objects that include the given name in the beginning of +-a, --all Process all the objects that include the given name in the beginning of object name. Related commands: a-put, a-find, a-info, a-delete @@ -149,17 +149,17 @@ Examples: 2. Make encrypted copies of all objects in bucket project_12345_data to bucket project_12345_sd a-encrypt project_12345_data --all --bucket project_12345_sd - + EOF exit 0 -fi +fi if [[ $object_def == "" ]] then - echo "Please give the object to retrieve:" >&2 - read object_def + echo "Please give the object to retrieve:" >&2 + read object_def fi project_label=$(echo ${os_project_name} | sed -e s/"project_"/""/g) @@ -178,14 +178,14 @@ then test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) #test=$(swift stat 2> /dev/null | grep -c "Account:") if [[ $test -lt 1 ]] - then + then echo "No connection to Allas!" >&2 echo "Please try setting the connection again." 
>&2 - echo "by running command:" >&2 + echo "by running command:" >&2 echo "" >&2 echo " source $allas_conf_path" >&2 exit 1 - fi + fi fi #Rclone through s3 @@ -196,27 +196,27 @@ fi #source /appl/opt/allas_conf #input=("$1") - + ## object list creation depends on if --all if [[ $all -eq 1 ]];then object_list=($(a-list $object_def 2> /dev/null)) else b_check=$(echo $object_def | grep -c "/") - if [[ $b_check -eq 1 ]]; then - object_list="$object_def" + if [[ $b_check -eq 1 ]]; then + object_list="$object_def" else echo "Definition $object_def did not define a single object" echo "Please use option --all if you want to encryp all objects in a bucket" echo "or seveal objecs, whos name sarts with same string" - exit 1 + exit 1 fi fi #check that object list contains something check_os=$(echo  ${object_list[@]} | wc -c) -echo ${object_list[@]} +echo ${object_list[@]} echo check_os: $check_os if [[ $check_os -lt 2 ]]; then echo "Object name: $object_name not found in $storage_server." >&2 @@ -243,7 +243,7 @@ do target_bucket=$bucket else target_bucket="$source_bucket" - fi + fi target_object="${object}${suffix}" echo "" echo "Making encrypted copy of ${source_bucket}/${object}" @@ -255,11 +255,11 @@ do if [[ "${bucket}" != "" ]]; then rclone cat "${storage_server}:$object_name" | rclone rcat ${storage_server}:${target_bucket}/${object} check_os=$(rclone ls ${storage_server}:${target_bucket}/${object} 2> /dev/null | wc -l) - else + else check_os=$(rclone ls ${storage_server}:${target_bucket}/${object} 2> /dev/null | wc -l) fi else - rclone cat -P "${storage_server}:$object_name" | crypt4gh encrypt --recipient_pk .sdx_key_tmp_$$ $all_keys | rclone rcat ${storage_server}:${target_bucket}/${target_object} + rclone cat -P "${storage_server}:$object_name" | crypt4gh encrypt --recipient_pk .sdx_key_tmp_$$ $all_keys | rclone rcat ${storage_server}:${target_bucket}/${target_object} check_os=$(rclone ls ${storage_server}:${target_bucket}/${target_object} 2> /dev/null | wc -l) fi @@ 
-269,7 +269,7 @@ do echo exit 1 fi - if [[ $replace -eq 1 ]]; then + if [[ $replace -eq 1 ]]; then # Do not remove already encrypted objects if [[ ${object_name:(-5):5} == "${suffix}" ]]; then if [[ "${bucket}" != "" ]]; then @@ -286,29 +286,3 @@ done rm -f .sdx_key_tmp_$$ exit 0 - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/a-find b/a-find index 50dd950..357e247 100755 --- a/a-find +++ b/a-find @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash #rclone is needed if [[ $(which rclone 2> /dev/null | wc -l ) -ne 1 ]];then @@ -32,7 +32,7 @@ all_buckets=(0) if [[ -e $HOME/.a_tools_conf ]]; then customized=1 source $HOME/.a_tools_conf -else +else customized=0 fi @@ -44,7 +44,7 @@ do user=("$2") shift shift - ;; + ;; '--bucket' | '-b' ) # query file bucket_name=($2) @@ -67,7 +67,7 @@ do '--all_bukets' | '-a' ) bucket_name=("$(swift list| tr '\n' ' ')") shift - ;; + ;; '-h' | '--help' ) print_help=(1) shift @@ -84,12 +84,12 @@ do esac done -# note about customization +# note about customization if [[ $silent -eq 0 ]]; then if [[ $customized -eq 1 ]]; then echo "Customer settings red from $HOME/.a_tools_conf" fi -fi +fi @@ -108,7 +108,7 @@ The basic syntax of the command is: a-find query_term -The query term is compared to the object names as well as the names and original paths of the files that +The query term is compared to the object names as well as the names and original paths of the files that have been uploaded to Allas with a-put. The matching objects are reported (but not downloaded). The query term is processed as a regular expression where some characters, for example dot (.), have a special meaning. @@ -119,7 +119,7 @@ The most commonly occurring special characters are listed below: ^ means the beginning of a line $ means the end of a line [ ] matches any of the characters inside the brackets. For example [abc] would match a,b or c. - [^ ] matches any character, except the characters inside the brackets. 
+ [^ ] matches any character, except the characters inside the brackets. For example [^abc] would select all rows that contain also other characters than just a,b and c. * matches zero or more of the preceding character or expression @@ -129,12 +129,12 @@ Options: -f, --files Lists the names of the matching files inside the objects in addition to the object name. --p, --project Search matches form the buckets of the defined project in stead of the currently configured project. +-p, --project Search matches form the buckets of the defined project in stead of the currently configured project. --b, --bucket By default all the standard buckets, used by a-put, are searched. Option --bucket allows you to specify a +-b, --bucket By default all the standard buckets, used by a-put, are searched. Option --bucket allows you to specify a single bucket that will be used for the search. --a, --all By default all the standard buckets, used by a-put, are searched. Option --all defines +-a, --all By default all the standard buckets, used by a-put, are searched. Option --all defines that all the buckets of the project will be included in the search. -s, --silent Output just the object names and number of hits. If -file option is included, @@ -146,9 +146,9 @@ EOF exit -fi +fi -#Assign project to be used if not defined +#Assign project to be used if not defined if [ $os_project_name == "" ] then if [ -e $HOME/.allas_default ] @@ -173,18 +173,18 @@ then test=$(rclone about ${storage_server}: 2> /dev/null | wc -l) #test=$(swift stat 2> /dev/null | grep -c "Account:") if [[ $test -lt 1 ]] - then + then echo "No connection to Allas!" 
echo "Please try setting up the connection again" echo "by running the command:" echo "" echo " source $allas_conf_path" exit 1 - fi + fi fi -#define standatrd buckests +#define standard buckets std_buckets=("${user}-${project_label}-MISC ${user}-${project_label}-HOME ${project_label}-puhti-SCRATCH ${project_label}-puhti-PROJAPPL ${user}-${project_label}-pub ${user}-${project_label}-flip") #check all buckerts if a specific bucket is not defined @@ -193,7 +193,7 @@ then buckets=$std_buckets #echo $buckets else - buckets=$bucket_name + buckets=$bucket_name fi @@ -202,12 +202,12 @@ fi if [[ $query == "" ]] then echo "" - echo "No query term given!" + echo "No query term given!" echo "Listing all objects but not files within them:" echo " " for bn in $buckets - do + do for metafile in $(rclone ls ${storage_server}:$bn 2> /dev/null | awk '{print $2}' | grep "_ameta$") do os_dirname=$(dirname "$metafile" ) @@ -235,26 +235,26 @@ total_objects=(0) object_hits=(0) total_object_hits=(0) for bn in $buckets -do +do if [ $silent -eq 0 ]; then echo "----------------------------------------------" - echo "Cheking bucket: $bn" + echo "Checking bucket: $bn" fi rclone ls ${storage_server}:$bn 2> /dev/null | awk '{print $2}' | grep -v "_ameta$" | grep "$query" > ./$$_find_tmp object_hits=$(cat ./$$_find_tmp | wc -l) if [[ $object_hits -gt 0 ]];then if [ $silent -eq 0 ]; then echo "" - echo "Matching object names:" - fi - awk '{ print "'$bn/'" $0}' ./$$_find_tmp ; rm -f ./$$_find_tmp + echo "Matching object names:" + fi + awk '{ print "'$bn/'" $0}' ./$$_find_tmp ; rm -f ./$$_find_tmp (( total_object_hits = total_object_hits + object_hits )) if [ $silent -eq 0 ]; then echo "----------------------------------------------" echo "a-put generated objects that include matching file name:";echo "" fi - - fi + + fi for metafile in $(rclone ls ${storage_server}:$bn 2> /dev/null | awk '{print $2}' | grep "_ameta$") do @@ -270,24 +270,24 @@ do os_object=("${bn}${os_dirname}/${os_filename}") if
[ $show_filelist -eq 1 ]; then if [ $silent -eq 0 ]; then - echo "Matching files in object $os_object :" + echo "Matching files in object $os_object :" rclone cat ${storage_server}:${bn}/$metafile | awk '{print $9}' | grep "$query" echo "" else #silent output - rclone cat ${storage_server}:${bn}/$metafile | awk '{print $9}' | grep "$query" | awk '{print "'$os_object' "$0}' - fi - else + rclone cat ${storage_server}:${bn}/$metafile | awk '{print $9}' | grep "$query" | awk '{print "'$os_object' "$0}' + fi + else if [ $silent -eq 0 ]; then - echo "" + echo "" #echo " Object: ${bn}/$(dirname "$metafile")/$(basename $metafile _ameta)" echo " Object: $os_object" echo " includes $num_hits file names that that match query: $query" echo "" else echo $os_object $num_hits - fi - fi + fi + fi (( total_objects = total_objects + 1 )) (( total_hits = total_hits + num_hits )) fi @@ -300,7 +300,7 @@ if [ $silent -eq 0 ]; then echo "Query: $query" echo "Nuber of object names matching: $total_object_hits" echo "" - echo "Total of $total_hits matching file names were found" + echo "Total of $total_hits matching file names were found" echo "in $total_objects objects uploaded with a-put" echo "-------------------------------------------------" fi diff --git a/a-flip b/a-flip index 4be2fdd..b2702d4 100755 --- a/a-flip +++ b/a-flip @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash #default user @@ -51,9 +51,9 @@ if [ $print_help -eq 1 ]; then cat < /dev/null | wc -l) #test=$(swift stat 2> /dev/null | grep -c "Account:") if [[ $test -lt 1 ]] - then + then echo "No connection to Allas!" echo "Please try setting up the connection again." exit 1 fi -else +else echo "a-flip is available only in swift mode" fi @@ -109,19 +109,19 @@ bucket_name=("${user}-${project_label}-flip") mkdir $tmp_dir for input in $input_def -do +do echo "Processing: $input" if [ ! -e $input ] ; then echo "File: $input does not exist!" 
exit 1 fi - + if [[ $(file -b $input | grep -c directory ) -eq 1 ]] then echo "This command can only be used to publish files, not directories." exit 1 fi - + #check that file name does not end with _ameta if [[ ${input:(-5):5} == "_ameta" ]]; then echo "Found a file/directoryname which ends with _ameta" @@ -144,7 +144,7 @@ do fi - #Check if stored file already exitst + #Check if stored file already exists #echo "rclone ls ${storage_server}:${bucket_name}/$tmp_file" if [[ $(rclone ls ${storage_server}:${bucket_name}/$tmp_file 2> /dev/null | wc -c) -gt 2 ]] @@ -153,7 +153,7 @@ do echo "A file/directory with the same name has already been uploaded into" echo "bucket $bucket_name in $storage_server" echo "" - rclone lsl ${storage_server}:${bucket_name}/$tmp_file + rclone lsl ${storage_server}:${bucket_name}/$tmp_file echo "" echo "Do you wish to overwrite the existing old object? [y/n]" read overwrite_old @@ -170,19 +170,19 @@ do echo "original_location: $file_path" >> ${tmp_dir}/${tmp_file}_ameta echo "" ls -l $input >> ${tmp_dir}/${tmp_file}_ameta - + if [[ $tot_size -gt $max_size ]] - then + then echo "This file or directory is too big for this tool" echo "Total size: ${tot_size}K" echo "Please use swift or rclone command to upload the data to allas" rm -f ${tmp_dir}/${tmp_file}_ameta - rmdir ${tmp_dir} + rmdir ${tmp_dir} exit 1 - fi + fi # if [[ $tot_size -gt $free_space ]] -# then +# then # echo "There is not enough space for the temporary files." # echo "$input contains $num_files files or directories that take ${tot_size}K of disk space" # echo "Available free space is ${free_space}K" @@ -201,7 +201,7 @@ do if [ $mode == "swift" ] then # For less than 5GB files rclone is used for uploading - + echo "Uploading data to allas."
# echo "rclone copy --progress ${tmp_dir}/$tmp_file ${storage_server}:${bucket_name}/${partial_path}" rclone copy -L --progress ${tmp_dir}/$tmp_file ${storage_server}:${bucket_name} @@ -209,9 +209,9 @@ do if [ $exitcode -ne 0 ]; then echo "" echo "File upload for $infile failed" - + rclone deletefile ${storage_server}:${bucket_name}/$tmp_file - rm -f ${tmp_dir}/$tmp_file + rm -f ${tmp_dir}/$tmp_file rm -f ${tmp_dir}/${tmp_file}_ameta rmdir ${tmp_dir} exit 1 @@ -221,17 +221,17 @@ do if [[ $tot_size -lt 5000000 ]] then echo "Confirming upload..." - #checksums for local and allas files + #checksums for local and allas files sum1=($(md5sum ${tmp_dir}/$tmp_file)) sum2=($(rclone md5sum ${storage_server}:${bucket_name}/$tmp_file)) - - #check is cheksums match + + #check if checksums match if [[ ${sum1[0]} != ${sum2[0]} ]] - then + then echo "Upload of $input was not successfull!" echo "Cleaning the failed upload" rclone deletefile ${storage_server}:${bucket_name}/$tmp_file - rm -f ${tmp_dir}/$tmp_file + rm -f ${tmp_dir}/$tmp_file rm -f ${tmp_dir}/${tmp_file}_ameta rmdir ${tmp_dir} exit 1 @@ -247,14 +247,14 @@ do #update metadata echo "" echo "Adding metadata for uploaded $input" - #echo "rclone copy ./${tmp_file}_ameta ${storage_server}:${bucket_name}/${partial_path}" + #echo "rclone copy ./${tmp_file}_ameta ${storage_server}:${bucket_name}/${partial_path}" rclone copy ${tmp_dir}/${tmp_file}_ameta ${storage_server}:${bucket_name}/${partial_path} - + rm -f ${tmp_dir}/${tmp_file}_ameta echo "$input uploaded to ${bucket_name}" echo "Public link: https://a3s.fi/${bucket_name}/$tmp_file" - + tmp_file=("not_defined") done @@ -262,7 +262,7 @@ done #Clean old files from flip bucket for ameta_name in $(rclone lsl ${storage_server}:${bucket_name} | grep "_ameta$" | grep -v $(date +%Y-%m-%d ) | grep -v $(date -d "yesterday 13:00" +%Y-%m-%d) | awk '{ print $NF}') -do +do # echo "removing: ${object_name}."
#remove ameta file rclone deletefile ${storage_server}:${bucket_name}/${ameta_name} @@ -274,7 +274,7 @@ done echo '' > ${tmp_dir}/index.html for i in $(swift list $bucket_name | grep -v '_ameta$' ) -do +do echo '
  • '$i'
  • ' >> ${tmp_dir}/index.html done echo '' >> ${tmp_dir}/index.html diff --git a/a-put b/a-put index 69e241b..d82ff1c 100755 --- a/a-put +++ b/a-put @@ -14,7 +14,7 @@ check_swift_connection () { if [[ $test -lt 1 ]] then - echo "No connection to Allas!" + echo "No connection to Allas!" echo "Please try setting up the connection again." exit 1 else diff --git a/a-stream b/a-stream index 007d05e..2cf8e84 100755 --- a/a-stream +++ b/a-stream @@ -164,10 +164,10 @@ else fi -# check all buckets if the bucket is not deifned +# check all buckets if the bucket is not defined if [[ $object_with_bucket -eq 0 ]] then - #define standatrd buckests + #define standard buckets buckets=("${user}-${project_label}-MISC ${user}-${project_label}-HOME ${project_label}-puhti-SCRATCH ${project_label}-puhti-PROJAPPL ${user}-${project_label}-pub ${user}-${project_label}-flip") num_buckets=(0) @@ -225,7 +225,7 @@ if [[ ${object_name:(-5):5} == ".c4gh" ]]; then encrypt="crypt4gh" echo "Crypt4gh encrypted object" >&2 if [[ $secret_key == "" && $asis_mode -eq 0 ]]; then - echo "The secret key for decopression is not defined" >&2 + echo "The secret key for decompression is not defined" >&2 echo "Define the key file with option: --sk " >&2 exit fi @@ -317,28 +317,3 @@ fi exit - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/allas-dir-to-bucket b/allas-dir-to-bucket index b907bfc..1606670 100755 --- a/allas-dir-to-bucket +++ b/allas-dir-to-bucket @@ -10,6 +10,7 @@ # $1 is the location to be copied, $2 is the destination container +# TODO: Check if 500k file limit in bucket is exceeded #function to check that swift works check_swift_connection () { diff --git a/allas_conf b/allas_conf index 4b6acf2..33fdf2b 100644 --- a/allas_conf +++ b/allas_conf @@ -441,12 +441,12 @@ if $use_s3cmd; then echo "" #aws.s3 conf - mkdir -p $HOME/.aws + mkdir -m 0700 -p $HOME/.aws echo '[default]' > $HOME/.aws/credentials + chmod go-rwx $HOME/.aws/credentials echo AWS_ACCESS_KEY_ID="$ACCESS_KEY" >>
$HOME/.aws/credentials echo AWS_SECRET_ACCESS_KEY="$SECRET_KEY" >> $HOME/.aws/credentials echo AWS_DEFAULT_REGION =" regionOne " >> $HOME/.aws/credentials - chmod go-rwx $HOME/.aws/credentials export PYTHONPATH=$activePythonPath @@ -454,6 +454,7 @@ if $use_s3cmd; then rclone config delete s3allas mkdir -p $HOME/.config/rclone/ echo "" >> $HOME/.config/rclone/rclone.conf + chmod go-rwx $HOME/.config/rclone/rclone.conf echo '[s3allas]' >> $HOME/.config/rclone/rclone.conf echo 'type = s3' >> $HOME/.config/rclone/rclone.conf echo 'provider = Other' >> $HOME/.config/rclone/rclone.conf