From 116ab0d45d49b26cf70fc72a4ab55b69dff7731d Mon Sep 17 00:00:00 2001
From: jernejaMislej
Date: Sun, 7 Oct 2018 22:55:12 +0200
Subject: [PATCH 01/36] add: full help for all commands

---
 bin/mapillary_tools | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/bin/mapillary_tools b/bin/mapillary_tools
index e5c39f9d..ab85f94c 100755
--- a/bin/mapillary_tools
+++ b/bin/mapillary_tools
@@ -7,6 +7,7 @@ from mapillary_tools import commands
 
 advanced = '--advanced' in sys.argv
 version = '--version' in sys.argv
+full_help = '--full_help' in sys.argv
 
 if version:
     print("")
@@ -22,6 +23,8 @@ parser.add_argument(
     '--advanced', help='Use the tools under an advanced level with additional arguments and tools available.', action='store_true', required=False, default=False)
 parser.add_argument(
     '--version', help='Print mapillary tools version.', action='store_true', required=False, default=False)
+parser.add_argument(
+    '--full_help', help='Print full help for all the available commands and their arguments.', action='store_true', required=False, default=False)
 
 subparsers = parser.add_subparsers(
     help="Please choose one of the available tools", dest='tool', metavar='tool')
@@ -44,6 +47,14 @@ for command in all_commands:
     if advanced:
         command.add_advanced_arguments(subparser)
 
+if full_help:
+    subparsers_actions = [action for action in parser._actions if isinstance(
+        action, argparse._SubParsersAction)]
+    for subparsers_action in subparsers_actions:
+        for choice, subparser in subparsers_action.choices.items():
+            print("Subcommand '{}'".format(choice))
+            print(subparser.format_help())
+
 args = parser.parse_args()
 
 args_command = vars(args)['tool']
@@ -52,6 +63,8 @@ if 'advanced' in vars(args):
     del vars(args)['advanced']
 if 'version' in vars(args):
     del vars(args)['version']
+if 'full_help' in vars(args):
+    del vars(args)['full_help']
 
 # Run the selected subcommand if unit command, or in case of batch
 # command, run several unit commands

From c9828e1cf602417c79cba7848a631510263aeb57 Mon Sep 17 00:00:00 2001
From: jernejaMislej
Date: Sun, 7 Oct 2018 23:52:18 +0200
Subject: [PATCH 02/36] add: more format in print out

---
 mapillary_tools/insert_MAPJson.py            | 12 ++---
 mapillary_tools/interpolation.py             | 19 ++-----
 mapillary_tools/post_process.py              |  3 +-
 mapillary_tools/process_csv.py               |  3 +-
 mapillary_tools/process_geotag_properties.py |  2 +-
 .../process_import_meta_properties.py        | 13 ++---
 .../process_sequence_properties.py           |  7 +--
 mapillary_tools/process_upload_params.py     | 13 ++---
 mapillary_tools/process_user_properties.py   |  2 +-
 mapillary_tools/process_video.py             |  7 +--
 mapillary_tools/processing.py                | 53 ++++---------------
 11 files changed, 38 insertions(+), 96 deletions(-)

diff --git a/mapillary_tools/insert_MAPJson.py b/mapillary_tools/insert_MAPJson.py
index 3022fff4..2c87bbdd 100644
--- a/mapillary_tools/insert_MAPJson.py
+++ b/mapillary_tools/insert_MAPJson.py
@@ -4,6 +4,7 @@
 import sys
 import processing
 import uploader
+from tqdm import tqdm
 
 
 def insert_MAPJson(import_path,
@@ -45,14 +46,7 @@ def insert_MAPJson(import_path,
         print("No images to run process finalization")
         print("If the images have already been processed and not yet uploaded, they can be processed again, by passing the argument --rerun")
 
-    progress_count = 0
-    for image in process_file_list:
-        progress_count += 1
-        if verbose:
-            if (progress_count % 50) == 0:
-                sys.stdout.write(".")
-            if (progress_count % 5000) == 0:
-                print("")
+    for image in tqdm(process_file_list, desc="Inserting mapillary image description in image EXIF"):
 
         # check the processing logs
         log_root = uploader.log_rootpath(image)
 
         # check if geotag process was a success
         if not processing.check_process_status(log_root,
                                                "geotag_process",
                                                "success"):
             continue
 
         # check if the image has been uploaded
         if processing.check_process_status(log_root,
                                            "upload",
                                            "success"):
             continue
 
         # save the consolidated json in the image metadata
         processing.create_mapillary_description(image,
                                                 user_name,
                                                 user_email,
                                                 user_key,
                                                 user_upload_token,
                                                 final_mapillary_image_description,
                                                 verbose=verbose)
 
-    print("Sub process finished")
+    print("Sub process ended")
diff --git a/mapillary_tools/interpolation.py b/mapillary_tools/interpolation.py
index c9b83efc..777d9087 100644
--- a/mapillary_tools/interpolation.py
+++ b/mapillary_tools/interpolation.py
@@ -9,6 +9,7 @@
 import process_csv
 import csv
 import datetime
+from tqdm import tqdm
 
 EPOCH = datetime.datetime.utcfromtimestamp(0)
 
@@ -121,12 +122,7 @@ def interpolation(data,
         sys.stdout.write("Interpolating gps for {} images missing geotags.".format(
             len(missing_geotags)))
 
-        counter = 0
-        for image, timestamp in missing_geotags:
-            counter += 1
-            sys.stdout.write('.')
-            if (counter % 100) == 0:
-                print("")
+        for image, timestamp in tqdm(missing_geotags, desc="Interpolating missing gps"):
             # interpolate
             try:
                 lat, lon, bearing, elevation = interpolate_lat_lon(
@@ -171,14 +167,7 @@ def interpolation(data,
 
         # read timestamps
         timestamps = []
-        counter = 0
-        for image in process_file_list:
-
-            # print progress
-            counter += 1
-            sys.stdout.write('.')
-            if (counter % 100) == 0:
-                print("")
+        for image in tqdm(process_file_list, desc="Interpolating identical timestamps"):
 
             # load exif
             exif = ExifRead(image)
@@ -197,7 +186,7 @@ def interpolation(data,
         counter = 0
 
         # write back
-        for image, timestamp in zip(process_file_list, timestamps_interpolated):
+        for image, timestamp in tqdm(zip(process_file_list, timestamps_interpolated), desc="Writing capture time in image EXIF"):
 
             # print progress
             counter += 1
diff --git a/mapillary_tools/post_process.py b/mapillary_tools/post_process.py
index 236919ac..aaa6d8bb 100644
--- a/mapillary_tools/post_process.py
+++ b/mapillary_tools/post_process.py
@@ -4,6 +4,7 @@
 import uploader
 import json
 import shutil
+from tqdm import tqdm
 
 
 def post_process(import_path,
@@ -143,7 +144,7 @@ def post_process(import_path,
         to_be_pushed_files = uploader.get_success_only_manual_upload_file_list(
             import_path, skip_subfolders)
         params = {}
-        for image in to_be_pushed_files:
+        for image in tqdm(to_be_pushed_files, desc="Pushing images"):
             log_root = uploader.log_rootpath(image)
             upload_params_path = os.path.join(
                 log_root, "upload_params_process.json")
diff --git a/mapillary_tools/process_csv.py b/mapillary_tools/process_csv.py
index 4e5b384c..b3e1669d 100644
--- a/mapillary_tools/process_csv.py
+++ b/mapillary_tools/process_csv.py
@@ -6,6 +6,7 @@
 import process_import_meta_properties
 from exif_write import ExifEdit
 import csv
+from tqdm import tqdm
 
 META_DATA_TYPES = ["string", "double", "long", "date", "boolean"]
 
@@ -251,7 +252,7 @@ def process_csv(import_path,
         print("Warning, filename column not provided, images will be aligned with the csv data in order of the image filenames.")
 
     # process each image
-    for idx, image in enumerate(process_file_list):
+    for idx, image in tqdm(enumerate(process_file_list), desc="Inserting csv data in image EXIF"):
 
         # get image entry index
         image_index = get_image_index(image, file_names) if file_names else idx
diff --git a/mapillary_tools/process_geotag_properties.py b/mapillary_tools/process_geotag_properties.py
index 16f5ef9c..d140f96c 100644
--- a/mapillary_tools/process_geotag_properties.py
+++ b/mapillary_tools/process_geotag_properties.py
@@ -118,4 +118,4 @@ def process_geotag_properties(import_path,
                                                offset_time,
                                                offset_angle,
                                                verbose)
-    print("Sub process finished")
+    print("Sub process ended")
diff --git
a/mapillary_tools/process_import_meta_properties.py b/mapillary_tools/process_import_meta_properties.py index bbb78283..203e6025 100644 --- a/mapillary_tools/process_import_meta_properties.py +++ b/mapillary_tools/process_import_meta_properties.py @@ -4,6 +4,7 @@ import processing import uploader from exif_read import ExifRead +from tqdm import tqdm META_DATA_TYPES = {"strings": str, "doubles": float, @@ -201,14 +202,8 @@ def process_import_meta_properties(import_path, # read import meta from image EXIF and finalize the import # properties process - progress_count = 0 - for image in process_file_list: - progress_count += 1 - if verbose: - if (progress_count % 50) == 0: - sys.stdout.write(".") - if (progress_count % 5000) == 0: - print("") + for image in tqdm(process_file_list, desc="Processing image import properties"): + import_meta_data_properties = get_import_meta_properties_exif( image, verbose) finalize_import_properties_process(image, @@ -223,4 +218,4 @@ def process_import_meta_properties(import_path, import_meta_data_properties, custom_meta_data, camera_uuid) - print("Sub process finished") + print("Sub process ended") diff --git a/mapillary_tools/process_sequence_properties.py b/mapillary_tools/process_sequence_properties.py index b902e768..0aa751cd 100644 --- a/mapillary_tools/process_sequence_properties.py +++ b/mapillary_tools/process_sequence_properties.py @@ -7,6 +7,7 @@ from geo import compute_bearing, gps_distance, diff_bearing import processing import uploader +from tqdm import tqdm MAX_SEQUENCE_LENGTH = 500 @@ -17,8 +18,8 @@ def finalize_sequence_processing(sequence, final_capture_times, import_path, verbose=False): - for image, direction, capture_time in zip(final_file_list, - final_directions, final_capture_times): + for image, direction, capture_time in tqdm(zip(final_file_list, + final_directions, final_capture_times), desc="Finalizing sequence process"): mapillary_description = { 'MAPSequenceUUID': sequence, 'MAPCompassHeading': { @@ -197,4 +198,4 @@ def process_sequence_properties(import_path, MAX_SEQUENCE_LENGTH], import_path, verbose) - print("Sub process finished") + print("Sub process ended") diff --git a/mapillary_tools/process_upload_params.py b/mapillary_tools/process_upload_params.py index 689b523a..b2e19862 100644 --- a/mapillary_tools/process_upload_params.py +++ b/mapillary_tools/process_upload_params.py @@ -2,6 +2,7 @@ import sys import processing import uploader +from tqdm import tqdm def process_upload_params(import_path, @@ -74,14 +75,8 @@ def process_upload_params(import_path, user_signature_hash = credentials["user_signature_hash"] user_key = credentials["MAPSettingsUserKey"] - progress_count = 0 - for image in process_file_list: - progress_count += 1 - if verbose: - if (progress_count % 50) == 0: - sys.stdout.write(".") - if (progress_count % 5000) == 0: - print("") + for image in tqdm(process_file_list, desc="Processing image upload parameters"): + # check the status of the sequence processing log_root = uploader.log_rootpath(image) duplicate_flag_path = os.path.join(log_root, @@ -113,4 +108,4 @@ def process_upload_params(import_path, log_root, "manual_upload") open(log_manual_upload, 'a').close() - print("Sub process finished") + print("Sub process ended") diff --git a/mapillary_tools/process_user_properties.py b/mapillary_tools/process_user_properties.py index 68b799eb..19e42c13 100644 --- a/mapillary_tools/process_user_properties.py +++ b/mapillary_tools/process_user_properties.py @@ -83,4 +83,4 @@ def process_user_properties(import_path, 
"success", verbose, user_properties) - print("Sub process finished") + print("Sub process ended") diff --git a/mapillary_tools/process_video.py b/mapillary_tools/process_video.py index 8663f486..946e83c5 100644 --- a/mapillary_tools/process_video.py +++ b/mapillary_tools/process_video.py @@ -5,6 +5,7 @@ import subprocess import sys import uploader +from tqdm import tqdm from exif_write import ExifEdit @@ -31,7 +32,7 @@ def timestamps_from_filename(video_filename, interval=2.0, adjustment=1.0): capture_times = [] - for image in full_image_list: + for image in tqdm(full_image_list, desc="Deriving frame capture time"): capture_times.append(timestamp_from_filename(video_filename, os.path.basename(image), start_time, @@ -157,8 +158,8 @@ def insert_video_frame_timestamp(video_filename, import_path, start_time, sample sample_interval, duration_ratio) - for image, timestamp in zip(frame_list, - video_frame_timestamps): + for image, timestamp in tqdm(zip(frame_list, + video_frame_timestamps), desc="Inserting frame capture time"): try: exif_edit = ExifEdit(image) exif_edit.add_date_time_original(timestamp) diff --git a/mapillary_tools/processing.py b/mapillary_tools/processing.py index 70a02b29..b723ce60 100644 --- a/mapillary_tools/processing.py +++ b/mapillary_tools/processing.py @@ -18,7 +18,7 @@ from gps_parser import get_lat_lon_time_from_gpx, get_lat_lon_time_from_nmea from gpx_from_gopro import gpx_from_gopro from gpx_from_blackvue import gpx_from_blackvue - +from tqdm import tqdm STATUS_PAIRS = {"success": "failed", "failed": "success" @@ -44,11 +44,11 @@ def estimate_sub_second_time(files, interval=0.0): second that each picture was taken. ''' if interval <= 0.0: - return [exif_time(f) for f in files] + return [exif_time(f) for f in tqdm(files, desc="Reading image capture time")] onesecond = datetime.timedelta(seconds=1.0) T = datetime.timedelta(seconds=interval) - for i, f in enumerate(files): + for i, f in tqdm(enumerate(files), desc="Estimating subsecond time"): m = exif_time(f) if not m: pass @@ -73,14 +73,7 @@ def geotag_from_exif(process_file_list, import_path, offset_angle=0.0, verbose=False): - progress_count = 0 - for image in process_file_list: - progress_count += 1 - if verbose: - if (progress_count % 50) == 0: - sys.stdout.write(".") - if (progress_count % 5000) == 0: - print("") + for image in tqdm(process_file_list, desc="Extracting gps data from image EXIF"): geotag_properties = get_geotag_properties_from_exif( image, offset_angle, verbose) @@ -249,17 +242,8 @@ def geotag_from_gps_trace(process_file_list, # update offset time with the gps start time offset_time += (sorted(sub_second_times) [0] - gps_trace[0][0]).total_seconds() - if verbose: - sys.stdout.write("Geotagging from gpx trace...") - progress_count = 0 - for image, capture_time in zip(process_file_list, - sub_second_times): - progress_count += 1 - if verbose: - if (progress_count % 50) == 0: - sys.stdout.write(".") - if (progress_count % 5000) == 0: - print("") + for image, capture_time in tqdm(zip(process_file_list, + sub_second_times), desc="Inserting gps data into image EXIF"): if not capture_time: print("Error, capture time could not be extracted for image " + image) create_and_log_process(image, @@ -718,16 +702,7 @@ def create_and_log_process_in_list(process_file_list, status, verbose=False, mapillary_description={}): - if verbose: - sys.stdout.write("Logging...") - progress_count = 0 - for image in process_file_list: - progress_count += 1 - if verbose: - if (progress_count % 50) == 0: - sys.stdout.write(".") - 
if (progress_count % 5000) == 0: - print("") + for image in tqdm(process_file_list, desc="Logging"): create_and_log_process(image, process, status, @@ -910,17 +885,7 @@ def load_geotag_points(process_file_list, verbose=False): lons = [] directions = [] - if verbose: - sys.stdout.write("Loading geotag points...") - progress_count = 0 - for image in process_file_list: - progress_count += 1 - if verbose: - if (progress_count % 50) == 0: - sys.stdout.write(".") - if (progress_count % 5000) == 0: - print("") - # check the status of the geotagging + for image in tqdm(process_file_list, desc="Loading geotag points"): log_root = uploader.log_rootpath(image) geotag_data = get_geotag_data(log_root, image, @@ -1075,7 +1040,7 @@ def interpolate_timestamp(capture_times): def get_images_geotags(process_file_list): geotags = [] missing_geotags = [] - for image in sorted(process_file_list): + for image in tqdm(sorted(process_file_list), desc="Reading gps data"): exif = ExifRead(image) timestamp = exif.extract_capture_time() lon, lat = exif.extract_lon_lat() From e5c04cd2ab93f8b7941262dcd5f09e0f4dee6d15 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 8 Oct 2018 16:22:14 +0200 Subject: [PATCH 03/36] fix: index bug --- mapillary_tools/uploader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapillary_tools/uploader.py b/mapillary_tools/uploader.py index 0c001303..5ed7d5ab 100644 --- a/mapillary_tools/uploader.py +++ b/mapillary_tools/uploader.py @@ -671,7 +671,7 @@ def upload_file_list(file_list, file_params={}, number_threads=None, max_attempt uploader.start() for uploader in uploaders: - uploaders[i].join(1) + uploaders[uploader].join(1) while q.unfinished_tasks: time.sleep(1) From 2f7adbf9fbfd21c76b3984b5e2c56ccc3fff543c Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 8 Oct 2018 17:02:21 +0200 Subject: [PATCH 04/36] fix: index bug --- mapillary_tools/uploader.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mapillary_tools/uploader.py b/mapillary_tools/uploader.py index 5ed7d5ab..f5745e36 100644 --- a/mapillary_tools/uploader.py +++ b/mapillary_tools/uploader.py @@ -670,8 +670,8 @@ def upload_file_list(file_list, file_params={}, number_threads=None, max_attempt uploader.daemon = True uploader.start() - for uploader in uploaders: - uploaders[uploader].join(1) + for idx in enumerate(uploaders): + uploaders[idx].join(1) while q.unfinished_tasks: time.sleep(1) From 569cb755f9a7bb3de9d9d846ccd5c43f23fe794c Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 8 Oct 2018 17:04:28 +0200 Subject: [PATCH 05/36] fix: index bug --- mapillary_tools/uploader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapillary_tools/uploader.py b/mapillary_tools/uploader.py index f5745e36..8b04ca5b 100644 --- a/mapillary_tools/uploader.py +++ b/mapillary_tools/uploader.py @@ -670,7 +670,7 @@ def upload_file_list(file_list, file_params={}, number_threads=None, max_attempt uploader.daemon = True uploader.start() - for idx in enumerate(uploaders): + for idx, uploader in enumerate(uploaders): uploaders[idx].join(1) while q.unfinished_tasks: From 1c1f450fa397a12d3f05eb8d71c2d5af748d639b Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 8 Oct 2018 23:24:49 +0200 Subject: [PATCH 06/36] add: store local file name to photo uuid mapping --- mapillary_tools/commands/post_process.py | 2 + mapillary_tools/post_process.py | 440 ++++++++++++----------- 2 files changed, 238 insertions(+), 204 deletions(-) diff --git 
a/mapillary_tools/commands/post_process.py b/mapillary_tools/commands/post_process.py index b8f4946c..eeccebb7 100644 --- a/mapillary_tools/commands/post_process.py +++ b/mapillary_tools/commands/post_process.py @@ -33,6 +33,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--split_import_path', help='Provide the path where the images should be moved to based on the import status.', action='store', required=False, default=None) + parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', + action='store_true', default=False, required=False) def run(self, args): diff --git a/mapillary_tools/post_process.py b/mapillary_tools/post_process.py index aaa6d8bb..bd899d78 100644 --- a/mapillary_tools/post_process.py +++ b/mapillary_tools/post_process.py @@ -5,6 +5,7 @@ import json import shutil from tqdm import tqdm +import csv def post_process(import_path, @@ -18,10 +19,11 @@ def post_process(import_path, list_file_status=False, push_images=False, skip_subfolders=False, - verbose=False): + verbose=False, + save_local_mapping=False): # return if nothing specified - if not summarize and not move_images and not list_file_status and not push_images and not move_duplicates and not move_uploaded: + if not summarize and not move_images and not list_file_status and not push_images and not move_duplicates and not move_uploaded and not save_local_mapping: print("No post processing action specified.") return @@ -44,213 +46,243 @@ def post_process(import_path, print("Error, import directory " + import_path + " does not exist, exiting...") sys.exit(1) + if save_local_mapping: + local_mapping_filepath = os.path.join(os.path.dirname( + import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv") + + total_files = uploader.get_total_file_list(import_path) + + local_mapping = [] + for file in total_files: + relative_path = file.lstrip(os.path.abspath(import_path)) + log_rootpath = uploader.log_rootpath(file) + image_description_json_path = os.path.join( + log_rootpath, "mapillary_image_description.json") + if os.path.isfile(image_description_json_path): + image_description_json = processing.load_json( + image_description_json_path) + if "MAPPhotoUUID" in image_description_json: + image_file_uuid = image_description_json["MAPPhotoUUID"] + local_mapping.append((relative_path, image_file_uuid)) + else: + print( + "Error, photo uuid not in mapillary_image_description.json log file.") + else: + print("Warning, mapillary_image_description.json log file does not exist for image {}. 
Likely it was not processed successfully.") + with open(local_mapping_filepath, "w") as csvfile: + csvwriter = csv.writer(csvfile, delimiter=",") + for row in local_mapping: + csvwriter.writerow(row) + + else: + + print("Reading import logs for import path {}...".format(import_path)) + + # collect logs + summary_dict = {} + status_list_dict = {} + + total_files = uploader.get_total_file_list(import_path) + total_files_count = len(total_files) + + # upload logs + uploaded_files = uploader.get_success_upload_file_list( + import_path, skip_subfolders) + uploaded_files_count = len(uploaded_files) + + failed_upload_files = uploader.get_failed_upload_file_list( + import_path, skip_subfolders) + failed_upload_files_count = len(failed_upload_files) - print("Reading import logs for import path {}...".format(import_path)) - - # collect logs - summary_dict = {} - status_list_dict = {} - - total_files = uploader.get_total_file_list(import_path) - total_files_count = len(total_files) - - # upload logs - uploaded_files = uploader.get_success_upload_file_list( - import_path, skip_subfolders) - uploaded_files_count = len(uploaded_files) - - failed_upload_files = uploader.get_failed_upload_file_list( - import_path, skip_subfolders) - failed_upload_files_count = len(failed_upload_files) - - to_be_finalized_files = uploader.get_finalize_file_list(import_path) - to_be_finalized_files_count = len(to_be_finalized_files) - - summary_dict["total images"] = total_files_count - summary_dict["upload summary"] = { - "successfully uploaded": uploaded_files_count, - "failed uploads": failed_upload_files_count, - "uploaded to be finalized": to_be_finalized_files_count - } - - status_list_dict["successfully uploaded"] = uploaded_files - status_list_dict["failed uploads"] = failed_upload_files - status_list_dict["uploaded to be finalized"] = to_be_finalized_files - - # process logs - summary_dict["process summary"] = {} - process_steps = ["user_process", "import_meta_process", "geotag_process", - "sequence_process", "upload_params_process", "mapillary_image_description"] - process_status = ["success", "failed"] - for step in process_steps: - - process_success = len(processing.get_process_status_file_list( - import_path, step, "success", skip_subfolders)) - process_failed = len(processing.get_process_status_file_list( - import_path, step, "failed", skip_subfolders)) - - summary_dict["process summary"][step] = { - "failed": process_failed, - "success": process_success + to_be_finalized_files = uploader.get_finalize_file_list(import_path) + to_be_finalized_files_count = len(to_be_finalized_files) + + summary_dict["total images"] = total_files_count + summary_dict["upload summary"] = { + "successfully uploaded": uploaded_files_count, + "failed uploads": failed_upload_files_count, + "uploaded to be finalized": to_be_finalized_files_count } - duplicates_file_list = processing.get_duplicate_file_list( - import_path, skip_subfolders) - duplicates_file_list_count = len(duplicates_file_list) - - summary_dict["process summary"]["duplicates"] = duplicates_file_list_count - status_list_dict["duplicates"] = duplicates_file_list - - # processed for upload - to_be_uploaded_files = uploader.get_upload_file_list( - import_path, skip_subfolders) - to_be_uploaded_files_count = len(to_be_uploaded_files) - summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count - status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files - - # summary - if summarize: - print("") - print("Import summary for import 
path {} :".format(import_path)) - print(json.dumps(summary_dict, indent=4)) - - if save_as_json: - - try: - processing.save_json(summary_dict, os.path.join( - import_path, "mapillary_import_summary.json")) - except Exception as e: - print("Could not save summary into json at {}, due to {}".format( - os.path.join(import_path, "mapillary_import_summary.json"), e)) - - # list file status - if list_file_status: - print("") - print("List of file status for import path {} :".format(import_path)) - print(json.dumps(status_list_dict, indent=4)) - - if save_as_json: - - try: - processing.save_json(status_list_dict, os.path.join( - import_path, "mapillary_import_image_status_list.json")) - except Exception as e: - print("Could not save image status list into json at {}, due to {}".format( - os.path.join(import_path, "mapillary_import_image_status_list.json"), e)) - - # push images that were uploaded successfully - # collect upload params - if push_images: - to_be_pushed_files = uploader.get_success_only_manual_upload_file_list( + status_list_dict["successfully uploaded"] = uploaded_files + status_list_dict["failed uploads"] = failed_upload_files + status_list_dict["uploaded to be finalized"] = to_be_finalized_files + + # process logs + summary_dict["process summary"] = {} + process_steps = ["user_process", "import_meta_process", "geotag_process", + "sequence_process", "upload_params_process", "mapillary_image_description"] + process_status = ["success", "failed"] + for step in process_steps: + + process_success = len(processing.get_process_status_file_list( + import_path, step, "success", skip_subfolders)) + process_failed = len(processing.get_process_status_file_list( + import_path, step, "failed", skip_subfolders)) + + summary_dict["process summary"][step] = { + "failed": process_failed, + "success": process_success + } + + duplicates_file_list = processing.get_duplicate_file_list( + import_path, skip_subfolders) + duplicates_file_list_count = len(duplicates_file_list) + + summary_dict["process summary"]["duplicates"] = duplicates_file_list_count + status_list_dict["duplicates"] = duplicates_file_list + + # processed for upload + to_be_uploaded_files = uploader.get_upload_file_list( import_path, skip_subfolders) - params = {} - for image in tqdm(to_be_pushed_files, desc="Pushing images"): - log_root = uploader.log_rootpath(image) - upload_params_path = os.path.join( - log_root, "upload_params_process.json") - if os.path.isfile(upload_params_path): - with open(upload_params_path, "rb") as jf: - params[image] = json.load( - jf, object_hook=uploader.ascii_encode_dict) - - # get the s3 locations of the sequences - finalize_params = uploader.process_upload_finalization( - to_be_pushed_files, params) - uploader.finalize_upload(finalize_params) - # flag finalization for each file - uploader.flag_finalization(to_be_pushed_files) - - if move_images or move_duplicates or move_uploaded: - print("") - print("Note that images will be moved along with their mapillary logs in order to preserve the import status") - defualt_split_import_path = os.path.join( - import_path, "mapillary_import_split_images") - if not split_import_path: - final_split_path = defualt_split_import_path + to_be_uploaded_files_count = len(to_be_uploaded_files) + summary_dict["process summary"]["processed_not_yet_uploaded"] = to_be_uploaded_files_count + status_list_dict["processed_not_yet_uploaded"] = to_be_uploaded_files + + # summary + if summarize: + print("") + print("Import summary for import path {} :".format(import_path)) + 
print(json.dumps(summary_dict, indent=4)) + + if save_as_json: + + try: + processing.save_json(summary_dict, os.path.join( + import_path, "mapillary_import_summary.json")) + except Exception as e: + print("Could not save summary into json at {}, due to {}".format( + os.path.join(import_path, "mapillary_import_summary.json"), e)) + + # list file status + if list_file_status: print("") - print( - "Split import path not provided and will therefore be set to default path {}".format(defualt_split_import_path)) - if split_import_path: - if not os.path.isfile(split_import_path): + print("List of file status for import path {} :".format(import_path)) + print(json.dumps(status_list_dict, indent=4)) + + if save_as_json: + + try: + processing.save_json(status_list_dict, os.path.join( + import_path, "mapillary_import_image_status_list.json")) + except Exception as e: + print("Could not save image status list into json at {}, due to {}".format( + os.path.join(import_path, "mapillary_import_image_status_list.json"), e)) + + # push images that were uploaded successfully + # collect upload params + if push_images: + to_be_pushed_files = uploader.get_success_only_manual_upload_file_list( + import_path, skip_subfolders) + params = {} + for image in tqdm(to_be_pushed_files, desc="Pushing images"): + log_root = uploader.log_rootpath(image) + upload_params_path = os.path.join( + log_root, "upload_params_process.json") + if os.path.isfile(upload_params_path): + with open(upload_params_path, "rb") as jf: + params[image] = json.load( + jf, object_hook=uploader.ascii_encode_dict) + + # get the s3 locations of the sequences + finalize_params = uploader.process_upload_finalization( + to_be_pushed_files, params) + uploader.finalize_upload(finalize_params) + # flag finalization for each file + uploader.flag_finalization(to_be_pushed_files) + + if move_images or move_duplicates or move_uploaded: + print("") + print("Note that images will be moved along with their mapillary logs in order to preserve the import status") + defualt_split_import_path = os.path.join( + import_path, "mapillary_import_split_images") + if not split_import_path: final_split_path = defualt_split_import_path - print("Split import path does not exist, split import path will be set to default path {}".format( - defualt_split_import_path)) - else: - final_split_path = split_import_path - print("") - print("Splitting import path {} into {} based on image import status...".format( - import_path, final_split_path)) - if move_images: - move_duplicates = True - move_uploaded = True - # move failed uploads - if not len(failed_upload_files): print("") print( - "There are no failed upload images in the specified import path.") - else: - failed_upload_path = os.path.join( - final_split_path, "upload_failed") - - if not os.path.isdir(failed_upload_path): - os.makedirs(failed_upload_path) - - for failed in failed_upload_files: - failed_upload_image_path = os.path.join( - failed_upload_path, os.path.basename(failed)) - os.rename(failed, failed_upload_path) - failed_upload_log_path = os.path.dirname(uploader.log_rootpath( - failed_upload_image_path)) - if not os.path.isdir(failed_upload_log_path): - os.makedirs(failed_upload_log_path) - shutil.move(uploader.log_rootpath(failed), - failed_upload_log_path) - print("") - print("Done moving failed upload images to {}".format( - failed_upload_path)) - if move_duplicates: - if not len(duplicates_file_list): - print("") - print("There were no duplicates flagged in the specified import path. 
If you are processing the images with mapillary_tools and would like to flag duplicates, you must specify --advanced --flag_duplicates") - else: - duplicate_path = os.path.join(final_split_path, "duplicates") - if not os.path.isdir(duplicate_path): - os.makedirs(duplicate_path) - for duplicate in duplicates_file_list: - duplicate_image_path = os.path.join( - duplicate_path, os.path.basename(duplicate)) - os.rename(duplicate, duplicate_image_path) - duplicate_log_path = os.path.dirname(uploader.log_rootpath( - duplicate_image_path)) - if not os.path.isdir(duplicate_log_path): - os.makedirs(duplicate_log_path) - shutil.move(uploader.log_rootpath(duplicate), - duplicate_log_path) - print("") - print("Done moving duplicate images to {}".format( - duplicate_path)) - if move_uploaded: - if not len(uploaded_files): - print("") - print( - "There are no successfuly uploaded images in the specified import path.") - else: - upload_success_path = os.path.join( - final_split_path, "upload_success") - - if not os.path.isdir(upload_success_path): - os.makedirs(upload_success_path) - - for uploaded in uploaded_files: - uploaded_image_path = os.path.join( - upload_success_path, os.path.basename(uploaded)) - os.rename(uploaded, upload_success_path) - uploaded_log_path = os.path.dirname(uploader.log_rootpath( - uploaded_image_path)) - if not os.path.isdir(uploaded_log_path): - os.makedirs(uploaded_log_path) - shutil.move(uploader.log_rootpath(uploaded), - uploaded_log_path) - print("") - print("Done moving successfully uploaded images to {}".format( - upload_success_path)) + "Split import path not provided and will therefore be set to default path {}".format(defualt_split_import_path)) + if split_import_path: + if not os.path.isfile(split_import_path): + final_split_path = defualt_split_import_path + print("Split import path does not exist, split import path will be set to default path {}".format( + defualt_split_import_path)) + else: + final_split_path = split_import_path + print("") + print("Splitting import path {} into {} based on image import status...".format( + import_path, final_split_path)) + if move_images: + move_duplicates = True + move_uploaded = True + # move failed uploads + if not len(failed_upload_files): + print("") + print( + "There are no failed upload images in the specified import path.") + else: + failed_upload_path = os.path.join( + final_split_path, "upload_failed") + + if not os.path.isdir(failed_upload_path): + os.makedirs(failed_upload_path) + + for failed in failed_upload_files: + failed_upload_image_path = os.path.join( + failed_upload_path, os.path.basename(failed)) + os.rename(failed, failed_upload_path) + failed_upload_log_path = os.path.dirname(uploader.log_rootpath( + failed_upload_image_path)) + if not os.path.isdir(failed_upload_log_path): + os.makedirs(failed_upload_log_path) + shutil.move(uploader.log_rootpath(failed), + failed_upload_log_path) + print("") + print("Done moving failed upload images to {}".format( + failed_upload_path)) + if move_duplicates: + if not len(duplicates_file_list): + print("") + print("There were no duplicates flagged in the specified import path. 
If you are processing the images with mapillary_tools and would like to flag duplicates, you must specify --advanced --flag_duplicates") + else: + duplicate_path = os.path.join( + final_split_path, "duplicates") + if not os.path.isdir(duplicate_path): + os.makedirs(duplicate_path) + for duplicate in duplicates_file_list: + duplicate_image_path = os.path.join( + duplicate_path, os.path.basename(duplicate)) + os.rename(duplicate, duplicate_image_path) + duplicate_log_path = os.path.dirname(uploader.log_rootpath( + duplicate_image_path)) + if not os.path.isdir(duplicate_log_path): + os.makedirs(duplicate_log_path) + shutil.move(uploader.log_rootpath(duplicate), + duplicate_log_path) + print("") + print("Done moving duplicate images to {}".format( + duplicate_path)) + if move_uploaded: + if not len(uploaded_files): + print("") + print( + "There are no successfuly uploaded images in the specified import path.") + else: + upload_success_path = os.path.join( + final_split_path, "upload_success") + + if not os.path.isdir(upload_success_path): + os.makedirs(upload_success_path) + + for uploaded in uploaded_files: + uploaded_image_path = os.path.join( + upload_success_path, os.path.basename(uploaded)) + os.rename(uploaded, upload_success_path) + uploaded_log_path = os.path.dirname(uploader.log_rootpath( + uploaded_image_path)) + if not os.path.isdir(uploaded_log_path): + os.makedirs(uploaded_log_path) + shutil.move(uploader.log_rootpath(uploaded), + uploaded_log_path) + print("") + print("Done moving successfully uploaded images to {}".format( + upload_success_path)) From 727fe2ea7153224176013512ced790987ad79426 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 8 Oct 2018 23:26:38 +0200 Subject: [PATCH 07/36] fix: missing format --- mapillary_tools/post_process.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapillary_tools/post_process.py b/mapillary_tools/post_process.py index bd899d78..19bae23e 100644 --- a/mapillary_tools/post_process.py +++ b/mapillary_tools/post_process.py @@ -68,7 +68,7 @@ def post_process(import_path, print( "Error, photo uuid not in mapillary_image_description.json log file.") else: - print("Warning, mapillary_image_description.json log file does not exist for image {}. Likely it was not processed successfully.") + print("Warning, mapillary_image_description.json log file does not exist for image {}. 
Likely it was not processed successfully.".format(file)) with open(local_mapping_filepath, "w") as csvfile: csvwriter = csv.writer(csvfile, delimiter=",") for row in local_mapping: From 95b08abb4233cd856795d5561cac970e8472258d Mon Sep 17 00:00:00 2001 From: Yubin Kuang Date: Thu, 11 Oct 2018 20:16:30 +0200 Subject: [PATCH 08/36] fix: add tqdm dependency --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ba5615dd..c6665261 100644 --- a/setup.py +++ b/setup.py @@ -17,5 +17,6 @@ 'python-dateutil==2.7.3', 'pymp4==1.1.0', 'pynmea2==1.12.0', - 'pytest==3.2.3' + 'pytest==3.2.3', + 'tqdm==2.2.4' ]) From 0fe5f65bfaf9ebfcdbd68577e359e5d84522ffe7 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Thu, 11 Oct 2018 21:14:25 +0200 Subject: [PATCH 09/36] add: post process arg --- mapillary_tools/commands/process.py | 2 ++ mapillary_tools/commands/process_and_upload.py | 2 ++ mapillary_tools/commands/video_process.py | 2 ++ mapillary_tools/commands/video_process_and_upload.py | 2 ++ 4 files changed, 8 insertions(+) diff --git a/mapillary_tools/commands/process.py b/mapillary_tools/commands/process.py index 49714c65..2bca136b 100644 --- a/mapillary_tools/commands/process.py +++ b/mapillary_tools/commands/process.py @@ -99,6 +99,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument( '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) + parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', + action='store_true', default=False, required=False) # add custom meta data in a form of a string consisting of a triplet # "name,type,value" diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index 4a231f55..2ee48a47 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -104,6 +104,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument( '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) + parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', + action='store_true', default=False, required=False) # add custom meta data in a form of a string consisting of a triplet # "name,type,value" diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index 4f506657..cc2f64bb 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -108,6 +108,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument( '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) + parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', + action='store_true', default=False, required=False) # add custom meta data in a form of a string consisting of a triplet # "name,type,value" diff --git a/mapillary_tools/commands/video_process_and_upload.py 
b/mapillary_tools/commands/video_process_and_upload.py index 921c1a0d..0b5514fd 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -114,6 +114,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument( '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) + parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', + action='store_true', default=False, required=False) # add custom meta data in a form of a string consisting of a triplet # "name,type,value" From 023d60b0091efb7794e6520160d57f13257a7c60 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Fri, 12 Oct 2018 09:45:59 +0200 Subject: [PATCH 10/36] add: more info on failing gps extraction --- mapillary_tools/process_geotag_properties.py | 3 --- mapillary_tools/processing.py | 15 ++++++--------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/mapillary_tools/process_geotag_properties.py b/mapillary_tools/process_geotag_properties.py index d140f96c..67839fd2 100644 --- a/mapillary_tools/process_geotag_properties.py +++ b/mapillary_tools/process_geotag_properties.py @@ -75,7 +75,6 @@ def process_geotag_properties(import_path, elif geotag_source == "gpx" or geotag_source == "nmea": geotag_properties = processing.geotag_from_gps_trace(process_file_list, - import_path, geotag_source, geotag_source_path, offset_time, @@ -93,7 +92,6 @@ def process_geotag_properties(import_path, verbose) elif geotag_source == "gopro_video": geotag_properties = processing.geotag_from_gopro_video(process_file_list, - import_path, geotag_source_path, offset_time, offset_angle, @@ -103,7 +101,6 @@ def process_geotag_properties(import_path, verbose) elif geotag_source == "blackvue_videos": geotag_properties = processing.geotag_from_blackvue_video(process_file_list, - import_path, geotag_source_path, offset_time, offset_angle, diff --git a/mapillary_tools/processing.py b/mapillary_tools/processing.py index b723ce60..6fcbccf8 100644 --- a/mapillary_tools/processing.py +++ b/mapillary_tools/processing.py @@ -136,7 +136,6 @@ def get_geotag_properties_from_exif(image, offset_angle=0.0, verbose=False): def geotag_from_gopro_video(process_file_list, - import_path, geotag_source_path, offset_time, offset_angle, @@ -148,12 +147,12 @@ def geotag_from_gopro_video(process_file_list, geotag_source_path = gpx_from_gopro(geotag_source_path) if not geotag_source_path or not os.path.isfile(geotag_source_path): raise Exception - except: - print("Error, failed extracting data from gopro video, exiting...") + except Exception as e: + print("Error, failed extracting data from gopro geotag source path {} due to {}, exiting...".format( + geotag_source_path, e)) sys.exit(1) geotag_from_gps_trace(process_file_list, - import_path, "gpx", geotag_source_path, offset_time, @@ -165,7 +164,6 @@ def geotag_from_gopro_video(process_file_list, def geotag_from_blackvue_video(process_file_list, - import_path, geotag_source_path, offset_time, offset_angle, @@ -177,12 +175,12 @@ def geotag_from_blackvue_video(process_file_list, geotag_source_path = gpx_from_blackvue(geotag_source_path) if not geotag_source_path or not os.path.isfile(geotag_source_path): raise Exception - except Exception: - print("Error, failed extracting data from blackvue video, exiting...") + except 
Exception as e: + print("Error, failed extracting data from blackvue geotag source path {} due to {}, exiting...".format( + geotag_source_path, e)) sys.exit(1) geotag_from_gps_trace(process_file_list, - import_path, "gpx", geotag_source_path, offset_time, @@ -194,7 +192,6 @@ def geotag_from_blackvue_video(process_file_list, def geotag_from_gps_trace(process_file_list, - import_path, geotag_source, geotag_source_path, offset_time=0.0, From b28bee878b34b961abdafe27d5d96b4916832126 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Fri, 12 Oct 2018 11:54:41 +0200 Subject: [PATCH 11/36] add: more information about failing nmea parsing --- mapillary_tools/gpx_from_blackvue.py | 43 ++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 12 deletions(-) diff --git a/mapillary_tools/gpx_from_blackvue.py b/mapillary_tools/gpx_from_blackvue.py index ded74602..60e2b4b4 100644 --- a/mapillary_tools/gpx_from_blackvue.py +++ b/mapillary_tools/gpx_from_blackvue.py @@ -65,8 +65,12 @@ def get_points_from_bv(path): for l in lines: if "GPRMC" in l: m = l.lstrip('[]0123456789') - data = pynmea2.parse(m) - date = data.datetime.date() + try: + data = pynmea2.parse(m) + date = data.datetime.date() + except Exception as e: + print( + "Error in extracting the gps trace, nmea parsing failed due to {}".format(e)) break # Parse GPS trace @@ -80,15 +84,22 @@ def get_points_from_bv(path): m = l.lstrip('[]0123456789') if "GPRMC" in m: - data = pynmea2.parse(m) - date = data.datetime.date() - + try: + data = pynmea2.parse(m) + date = data.datetime.date() + except Exception as e: + print( + "Error in parsing gps trace to extract date information, nmea parsing failed due to {}".format(e)) if "$GPGGA" in m: - data = pynmea2.parse(m) - timestamp = datetime.datetime.combine( - date, data.timestamp) - lat, lon, alt = data.latitude, data.longitude, data.altitude - points.append((timestamp, lat, lon, alt)) + try: + data = pynmea2.parse(m) + timestamp = datetime.datetime.combine( + date, data.timestamp) + lat, lon, alt = data.latitude, data.longitude, data.altitude + points.append((timestamp, lat, lon, alt)) + except Exception as e: + print( + "Error in parsing gps trace to extract time and gps information, nmea parsing failed due to {}".format(e)) points.sort() offset += newb.end @@ -104,13 +115,21 @@ def gpx_from_blackvue(bv_video): if os.path.isdir(bv_video): video_files = uploader.get_video_file_list(bv_video) for video in video_files: - bv_data += get_points_from_bv(video) + try: + bv_data += get_points_from_bv(video) + except Exception as e: + print( + "Warning, could not extract gps from video {} due to {}, video will be skipped...".format(video, e)) dirname = os.path.dirname(bv_video) basename = os.path.basename(bv_video) gpx_path = os.path.join(dirname, basename + '.gpx') else: - bv_data = get_points_from_bv(bv_video) + try: + bv_data = get_points_from_bv(bv_video) + except Exception as e: + print( + "Warning, could not extract gps from video {} due to {}, video will be skipped...".format(bv_video, e)) basename, extension = os.path.splitext(bv_video) gpx_path = basename + '.gpx' From 45e1e412cf1d3ef291c96d3cc2208a939f7ec03f Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Fri, 12 Oct 2018 14:10:36 +0200 Subject: [PATCH 12/36] add: enable mapping for images that werent processed by tools --- mapillary_tools/post_process.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/mapillary_tools/post_process.py b/mapillary_tools/post_process.py index 19bae23e..fda8fa6d 100644 --- 
a/mapillary_tools/post_process.py +++ b/mapillary_tools/post_process.py @@ -6,6 +6,7 @@ import shutil from tqdm import tqdm import csv +import exif_read def post_process(import_path, @@ -54,6 +55,7 @@ def post_process(import_path, local_mapping = [] for file in total_files: + image_file_uuid = None relative_path = file.lstrip(os.path.abspath(import_path)) log_rootpath = uploader.log_rootpath(file) image_description_json_path = os.path.join( @@ -63,12 +65,19 @@ def post_process(import_path, image_description_json_path) if "MAPPhotoUUID" in image_description_json: image_file_uuid = image_description_json["MAPPhotoUUID"] - local_mapping.append((relative_path, image_file_uuid)) else: print( "Error, photo uuid not in mapillary_image_description.json log file.") else: - print("Warning, mapillary_image_description.json log file does not exist for image {}. Likely it was not processed successfully.".format(file)) + image_exif = exif_read.ExifRead(file) + image_description = json.loads( + image_exif.extract_image_description()) + if "MAPPhotoUUID" in image_description: + image_file_uuid = str(image_description["MAPPhotoUUID"]) + else: + print("Warning, image {} EXIF does not contain mapillary image description and mapillary_image_description.json log file does not exist. Try to process the image using mapillary_tools.".format(file)) + if image_file_uuid: + local_mapping.append((relative_path, image_file_uuid)) with open(local_mapping_filepath, "w") as csvfile: csvwriter = csv.writer(csvfile, delimiter=",") for row in local_mapping: From a944e23ed954e2ec1e5f241a41899d005a9579f8 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Fri, 12 Oct 2018 14:11:38 +0200 Subject: [PATCH 13/36] add: mapping arg to upload command --- mapillary_tools/commands/upload.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mapillary_tools/commands/upload.py b/mapillary_tools/commands/upload.py index 9c1486c3..422ca309 100644 --- a/mapillary_tools/commands/upload.py +++ b/mapillary_tools/commands/upload.py @@ -32,6 +32,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--push_images', help='Push images uploaded in given import path.', action='store_true', default=False, required=False) + parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', + action='store_true', default=False, required=False) def run(self, args): From 987971003609c3fd390bb73e14b1357959e90bc5 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Fri, 12 Oct 2018 15:56:34 +0200 Subject: [PATCH 14/36] add: time offset in geotagged images and missing import --- .../commands/process_and_upload.py | 1 + mapillary_tools/gpx_from_exif.py | 71 +++++++++++++++++++ mapillary_tools/process_geotag_properties.py | 15 +--- mapillary_tools/processing.py | 54 +++++++------- 4 files changed, 101 insertions(+), 40 deletions(-) create mode 100644 mapillary_tools/gpx_from_exif.py diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index 2ee48a47..4afd92ae 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -6,6 +6,7 @@ from mapillary_tools.process_upload_params import process_upload_params from mapillary_tools.insert_MAPJson import insert_MAPJson from mapillary_tools.upload import upload +from mapillary_tools.post_process import post_process class Command: diff --git a/mapillary_tools/gpx_from_exif.py 
b/mapillary_tools/gpx_from_exif.py new file mode 100644 index 00000000..00915ebf --- /dev/null +++ b/mapillary_tools/gpx_from_exif.py @@ -0,0 +1,71 @@ +import gpxpy +import exif_read + + +def write_gpx(path, data): + gpx = gpxpy.gpx.GPX() + + gpx_track = gpxpy.gpx.GPXTrack() + gpx.tracks.append(gpx_track) + + gpx_segment = gpxpy.gpx.GPXTrackSegment() + gpx_track.segments.append(gpx_segment) + + for point in data: + gpx_segment.points.append(gpxpy.gpx.GPXTrackPoint( + point[1], point[2], elevation=point[3], time=point[0])) + + with open(path, "w") as f: + f.write(gpx.to_xml()) + + +def get_points_from_exif(file_list, verbose=False): + data = [] + for file in file_list: + point = () + try: + exif = ExifRead(image) + except: + if verbose: + print("Warning, EXIF could not be read for image {}.".format(file)) + continue + try: + lon, lat = exif.extract_lon_lat() + except: + if verbose: + print( + "Warning {} image latitude or longitude tag not in EXIF.".format(file)) + continue + try: + timestamp = exif.extract_capture_time() + except: + if verbose: + print( + "Warning {} image capture time tag not in EXIF.".format(file)) + continue + if lon != None and lat != None and timestamp != None: + point = point + (timestamp, lat, lon) + else: + continue + + try: + altitude = exif.extract_altitude() + point = point + (altitude, ) + except: + pass + try: + heading = exif.extract_direction() + point = point + (heading, ) + except: + pass + if point: + data.append(point) + return data + + +def gpx_from_exif(file_list, import_path, verbose=False): + data = get_points_from_exif(file_list, verbose) + gpx_path = import_path + '.gpx' + write_gpx(gpx_path, data) + + return gpx_path diff --git a/mapillary_tools/process_geotag_properties.py b/mapillary_tools/process_geotag_properties.py index 67839fd2..513649f8 100644 --- a/mapillary_tools/process_geotag_properties.py +++ b/mapillary_tools/process_geotag_properties.py @@ -70,6 +70,7 @@ def process_geotag_properties(import_path, if geotag_source == "exif": geotag_properties = processing.geotag_from_exif(process_file_list, import_path, + offset_time, offset_angle, verbose) @@ -83,13 +84,6 @@ def process_geotag_properties(import_path, sub_second_interval, use_gps_start_time, verbose) - elif geotag_source == "csv": - geotag_properties = processing.geotag_from_csv(process_file_list, - import_path, - geotag_source_path, - offset_time, - offset_angle, - verbose) elif geotag_source == "gopro_video": geotag_properties = processing.geotag_from_gopro_video(process_file_list, geotag_source_path, @@ -108,11 +102,4 @@ def process_geotag_properties(import_path, sub_second_interval, use_gps_start_time, verbose) - elif geotag_source == "json": - geotag_properties = processing.geotag_from_json(process_file_list, - import_path, - geotag_source_path, - offset_time, - offset_angle, - verbose) print("Sub process ended") diff --git a/mapillary_tools/processing.py b/mapillary_tools/processing.py index 6fcbccf8..f5e0b6e6 100644 --- a/mapillary_tools/processing.py +++ b/mapillary_tools/processing.py @@ -18,6 +18,7 @@ from gps_parser import get_lat_lon_time_from_gpx, get_lat_lon_time_from_nmea from gpx_from_gopro import gpx_from_gopro from gpx_from_blackvue import gpx_from_blackvue +from gpx_from_exif import gpx_from_exif from tqdm import tqdm STATUS_PAIRS = {"success": "failed", @@ -71,17 +72,36 @@ def estimate_sub_second_time(files, interval=0.0): def geotag_from_exif(process_file_list, import_path, + offset_time=0.0, offset_angle=0.0, verbose=False): - for image in 
tqdm(process_file_list, desc="Extracting gps data from image EXIF"): - geotag_properties = get_geotag_properties_from_exif( - image, offset_angle, verbose) + if offset_time == 0: + for image in tqdm(process_file_list, desc="Extracting gps data from image EXIF"): + geotag_properties = get_geotag_properties_from_exif( + image, offset_angle, verbose) - create_and_log_process(image, - "geotag_process", - "success", - geotag_properties, - verbose) + create_and_log_process(image, + "geotag_process", + "success", + geotag_properties, + verbose) + else: + try: + geotag_source_path = gpx_from_exif( + process_file_list, import_path, verbose) + if not geotag_source_path or not os.path.isfile(geotag_source_path): + raise Exception + except Exception as e: + print( + "Error, failed extracting data from exif due to {}, exiting...".format(e)) + sys.exit(1) + + geotag_from_gps_trace(process_file_list, + "gpx", + geotag_source_path, + offset_time, + offset_angle, + verbose) def get_geotag_properties_from_exif(image, offset_angle=0.0, verbose=False): @@ -294,24 +314,6 @@ def get_geotag_properties_from_gps_trace(image, capture_time, gps_trace, offset_ return geotag_properties -def geotag_from_csv(process_file_list, - import_path, - geotag_source_path, - offset_time, - offset_angle, - verbose=False): - pass - - -def geotag_from_json(process_file_list, - import_path, - geotag_source_path, - offset_time, - offset_angle, - verbose=False): - pass - - def get_upload_param_properties(log_root, image, user_name, user_upload_token, user_permission_hash, user_signature_hash, user_key, verbose=False): if not os.path.isdir(log_root): From 7c12cba67c3855a2ab60330588aa0e4bb1d8d255 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 15 Oct 2018 14:05:58 +0200 Subject: [PATCH 15/36] add: gpx from exif --- mapillary_tools/gpx_from_exif.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/mapillary_tools/gpx_from_exif.py b/mapillary_tools/gpx_from_exif.py index 00915ebf..7b2cb1d5 100644 --- a/mapillary_tools/gpx_from_exif.py +++ b/mapillary_tools/gpx_from_exif.py @@ -4,17 +4,13 @@ def write_gpx(path, data): gpx = gpxpy.gpx.GPX() - gpx_track = gpxpy.gpx.GPXTrack() gpx.tracks.append(gpx_track) - gpx_segment = gpxpy.gpx.GPXTrackSegment() gpx_track.segments.append(gpx_segment) - for point in data: gpx_segment.points.append(gpxpy.gpx.GPXTrackPoint( point[1], point[2], elevation=point[3], time=point[0])) - with open(path, "w") as f: f.write(gpx.to_xml()) @@ -24,7 +20,7 @@ def get_points_from_exif(file_list, verbose=False): for file in file_list: point = () try: - exif = ExifRead(image) + exif = exif_read.ExifRead(file) except: if verbose: print("Warning, EXIF could not be read for image {}.".format(file)) @@ -47,7 +43,6 @@ def get_points_from_exif(file_list, verbose=False): point = point + (timestamp, lat, lon) else: continue - try: altitude = exif.extract_altitude() point = point + (altitude, ) @@ -67,5 +62,4 @@ def gpx_from_exif(file_list, import_path, verbose=False): data = get_points_from_exif(file_list, verbose) gpx_path = import_path + '.gpx' write_gpx(gpx_path, data) - return gpx_path From 4074dd34128bc2282c1d41ed46ed63bab037e2ac Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 15 Oct 2018 15:27:45 +0200 Subject: [PATCH 16/36] fix: issue for the time offset with geotagged images --- mapillary_tools/geo.py | 3 ++- mapillary_tools/gpx_from_exif.py | 1 + mapillary_tools/processing.py | 3 +-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/mapillary_tools/geo.py 
b/mapillary_tools/geo.py index bd508d08..e28324fa 100644 --- a/mapillary_tools/geo.py +++ b/mapillary_tools/geo.py @@ -160,7 +160,8 @@ def interpolate_lat_lon(points, t, max_dt=1): else: dt = (t - points[-1][0]).total_seconds() if dt > max_dt: - raise ValueError("time t not in scope of gpx file") + raise ValueError( + "time t not in scope of gpx file by {} seconds".format(dt)) else: print( "time t not in scope of gpx file by {} seconds, extrapolating...".format(dt)) diff --git a/mapillary_tools/gpx_from_exif.py b/mapillary_tools/gpx_from_exif.py index 7b2cb1d5..77b218af 100644 --- a/mapillary_tools/gpx_from_exif.py +++ b/mapillary_tools/gpx_from_exif.py @@ -60,6 +60,7 @@ def get_points_from_exif(file_list, verbose=False): def gpx_from_exif(file_list, import_path, verbose=False): data = get_points_from_exif(file_list, verbose) + data = sorted(data, key=lambda x: x[0]) gpx_path = import_path + '.gpx' write_gpx(gpx_path, data) return gpx_path diff --git a/mapillary_tools/processing.py b/mapillary_tools/processing.py index f5e0b6e6..f0e3d980 100644 --- a/mapillary_tools/processing.py +++ b/mapillary_tools/processing.py @@ -101,7 +101,7 @@ def geotag_from_exif(process_file_list, geotag_source_path, offset_time, offset_angle, - verbose) + verbose=verbose) def get_geotag_properties_from_exif(image, offset_angle=0.0, verbose=False): @@ -220,7 +220,6 @@ def geotag_from_gps_trace(process_file_list, sub_second_interval=0.0, use_gps_start_time=False, verbose=False): - # print time now to warn in case local_time if local_time: now = datetime.datetime.now(tzlocal()) From f7d79630aa30186263ec7463312c157b10e6e47f Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 15 Oct 2018 15:42:48 +0200 Subject: [PATCH 17/36] fix: help for keep_original --- mapillary_tools/commands/process.py | 2 +- mapillary_tools/commands/process_and_upload.py | 2 +- mapillary_tools/commands/video_process.py | 2 +- mapillary_tools/commands/video_process_and_upload.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mapillary_tools/commands/process.py b/mapillary_tools/commands/process.py index 2bca136b..1861c23c 100644 --- a/mapillary_tools/commands/process.py +++ b/mapillary_tools/commands/process.py @@ -83,7 +83,7 @@ def add_advanced_arguments(self, parser): # EXIF insert parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.', action='store_true', default=False, required=False) - parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.', + parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) # post process diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index 4afd92ae..bfeb56c2 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -85,7 +85,7 @@ def add_advanced_arguments(self, parser): # EXIF insert parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.', action='store_true', default=False, required=False) - parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the 
import_path.', + parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) parser.add_argument( '--number_threads', help='Specify the number of upload threads.', type=int, default=None, required=False) diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index cc2f64bb..530538a9 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -93,7 +93,7 @@ def add_advanced_arguments(self, parser): # EXIF insert parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.', action='store_true', default=False, required=False) - parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.', + parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) # post process parser.add_argument('--summarize', help='Summarize import for given import path.', diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 0b5514fd..8cb47e32 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -95,7 +95,7 @@ def add_advanced_arguments(self, parser): # EXIF insert parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.', action='store_true', default=False, required=False) - parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.', + parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) parser.add_argument( '--number_threads', help='Specify the number of upload threads.', type=int, default=None, required=False) From 9c692a32ea65a7d3b11607d8f2e8c572bcee3561 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 15 Oct 2018 22:28:54 +0200 Subject: [PATCH 18/36] add: more info on blackvue and gopro support --- README.md | 58 ++++++++++--------- mapillary_tools/commands/sample_video.py | 2 +- mapillary_tools/commands/video_process.py | 2 +- .../commands/video_process_and_upload.py | 2 +- mapillary_tools/process_video.py | 37 +++++------- 5 files changed, 50 insertions(+), 51 deletions(-) diff --git a/README.md b/README.md index fb717cf0..e05df2c0 100644 --- a/README.md +++ b/README.md @@ -235,24 +235,24 @@ mapillary_tools process_and_upload --advanced --import_path "path/to/images" --u ### Video Sampling and Upload - - Sample the video `path/to/video_filename.mp4` into the directory `path/to/images`, at a sample interval of 0.5 seconds and tag the sampled images with `capture time`. Note that the video frames will always be sampled into sub directory `.mapillary/sampled_video_frames/"video_filename"`, whether import path is specified or not. 
In case `import_path` is specified the final path for the sampled video frames will be `"import path"/.mapillary/sampled_video_frames/"video_filename"` and in case `import_path` is not specified, the final path for the sampled video frames will be `path/to/.mapillary/sampled_video_frames/"video_filename"`. + - Sample the video(s) located in `path/to/videos` into the directory `path/to/images`, at a sample interval of 0.5 seconds and tag the sampled images with `capture time`. Note that the video frames will always be sampled into sub directory `.mapillary/sampled_video_frames/"video_import_path"`, whether import path is specified or not. In case `import_path` is specified the final path for the sampled video frames will be `"import path"/.mapillary/sampled_video_frames/"video_import_path"` and in case `import_path` is not specified, the final path for the sampled video frames will be `path/to/.mapillary/sampled_video_frames/"video_import_path"`. ```bash -mapillary_tools sample_video --import_path "path/to/images" --video_file "path/to/video_filename.mp4" --video_sample_interval 0.5 --advanced +mapillary_tools sample_video --import_path "path/to/images" --video_import_path "path/to/videos" --video_sample_interval 0.5 --advanced ``` - - Sample the video `path/to/video_filename.mp4`, at a sample interval of 2 seconds (default value) and tag the resulting images with `capture time`. And then process and upload the resulting images for user `username_at_mapillary`, specifying a gpx track to be the source of geotag data. + - Sample the video(s) located in `path/to/videos`, at a sample interval of 2 seconds (default value) and tag the resulting images with `capture time`. And then process and upload the resulting images for user `username_at_mapillary`, specifying a gpx track to be the source of geotag data. ```bash -mapillary_tools sample_video --video_file "path/to/video_filename.mp4" --advanced -mapillary_tools process --advanced --import_path "path/to/.mapillary/sampled_video_frames/video_filename" --user_name "username_at_mapillary" --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" -mapillary_tools upload --import_path "path/to/.mapillary/sampled_video_frames/video_filename" +mapillary_tools sample_video --video_import_path "path/to/videos" --advanced +mapillary_tools process --advanced --import_path "path/to/.mapillary/sampled_video_frames/video_import_path" --user_name "username_at_mapillary" --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" +mapillary_tools upload --import_path "path/to/.mapillary/sampled_video_frames/video_import_path" ``` or ```bash -mapillary_tools video_process_and_upload --video_file "path/to/video_filename.mp4" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" +mapillary_tools video_process_and_upload --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" ``` ### Process csv @@ -362,16 +362,16 @@ Capture time is calculated based on the `video start time` and sampling interval #### Examples - - Sample the video `path/to/video_filename.mp4` at the default sampling rate 2 seconds, ie 1 video frame every 2 seconds. Video frames will be sampled into a sub directory `.mapillary/sampled_video_frames/video_filename` at the location of the video. + - Sample the video(s) located in `path/to/videos` at the default sampling rate 2 seconds, ie 1 video frame every 2 seconds. 
Video frames will be sampled into a sub directory `.mapillary/sampled_video_frames/video_import_path` at the location of the video. ```bash -mapillary_tools sample_video --video_file "path/to/video_filename.mp4" --advanced +mapillary_tools sample_video --video_import_path "path/to/videos" --advanced ``` -- Sample the video `path/to/video_filename.mp4` to directory `path/to/images` at a sampling rate 0.5 seconds, ie two video frames every second and specifying the video start time to be `156893940910` (milliseconds since UNIX epoch). +- Sample the video(s) located in `path/to/videos` to directory `path/to/images` at a sampling rate 0.5 seconds, ie two video frames every second and specifying the video start time to be `156893940910` (milliseconds since UNIX epoch). ```bash -mapillary_tools sample_video --import_path "path/to/images" --video_file "path/to/video_filename.mp4" --video_sample_interval 0.5 --video_start_time 156893940910 --advanced +mapillary_tools sample_video --import_path "path/to/images" --video_import_path "path/to/videos" --video_sample_interval 0.5 --video_start_time 156893940910 --advanced ``` ### `video_process` @@ -380,22 +380,10 @@ mapillary_tools sample_video --import_path "path/to/images" --video_file "path/t #### Examples - - Sample the video `path/to/video_filename.mp4` to directory `path/to/images` at the default sampling rate 2 seconds, ie 1 video frame every 2 seconds and process resulting video frames for user `mapillary_user`, reading geotag data from a GoPro video `path/to/video_filename.mp4` and specifying to derive camera direction based on `GPS`. - -```bash -mapillary_tools video_process --import_path "path/to/images" --video_file "path/to/video_filename.mp4" --user_name "mapillary_user" --advanced --geotag_source "gopro_video" --geotag_source_path "path/to/video_filename.mp4" --interpolate_directions -``` - - In case video start capture time could not be extracted or specified, images should be tagged with `capture time` from the external geotag source, by passing the argument `--use_gps_start_time`. To make sure the external source and images are aligned ok, an offset in seconds can be specified. ```bash -mapillary_tools video_process --import_path "path/to/images" --video_file "path/to/video_filename.mp4" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx" --use_gps_start_time --offset_time 2 -``` - - - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from Blackvue videos in `path/to/videos` and specifying to derive camera direction based on `GPS` and use the `GPS` start time. 
- -```bash -mapillary_tools video_process --import_path "path/to/images" --video_file "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 +mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx" --use_gps_start_time --offset_time 2 ``` ### `video_process_and_upload` @@ -404,10 +392,10 @@ mapillary_tools video_process --import_path "path/to/images" --video_file "path/ #### Examples - - Sample the video `path/to/video_filename.mp4` to directory `path/to/images` at the default sampling rate 1 second, ie one video frame every second. Process and upload resulting video frames for user `mapillary_user`, reading geotag data from a gpx track stored in `path/to/gpx_file` video, assuming video start time can be extracted from the video file and deriving camera direction based on `GPS`. + - Sample the video(s) located in `path/to/videos` to directory `path/to/images` at the default sampling rate 1 second, ie one video frame every second. Process and upload resulting video frames for user `mapillary_user`, reading geotag data from a gpx track stored in `path/to/gpx_file` video, assuming video start time can be extracted from the video file and deriving camera direction based on `GPS`. ```bash -mapillary_tools video_process_and_upload --import_path "path/to/images" --video_file "path/to/video_filename.mp4" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --video_sample_interval 1 --interpolate_directions +mapillary_tools video_process_and_upload --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --video_sample_interval 1 --interpolate_directions ``` ### Process Unit Tools @@ -458,6 +446,24 @@ Process unit tools are tools executed by the `process` tool. Usage of process un `post_process` provides functionalities to help summarize and organize the results of the `process` and/or `upload` commands. +## Camera specific + +### Blackvue + + - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. + +```bash +mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 +``` + +### GoPro + + - Sample one or more GoPro videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.5 seconds, ie 2 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the GoPro videos in `path/to/videos` and specifying to derive camera direction based on `GPS`. 
Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. + +```bash +mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gopro_video" --geotag_source_path "path/to/videos" --interpolate_directions --video_sample_interval 0.5 +``` + ## Troubleshooting In case of any issues with the installation and usage of `mapillary_tools`, check this section in case it has already been addressed, otherwise, open an issue on Github. diff --git a/mapillary_tools/commands/sample_video.py b/mapillary_tools/commands/sample_video.py index 375bb278..63831696 100644 --- a/mapillary_tools/commands/sample_video.py +++ b/mapillary_tools/commands/sample_video.py @@ -8,7 +8,7 @@ class Command: def add_basic_arguments(self, parser): # video specific args - parser.add_argument('--video_file', help='Provide the path to a video file or a directory containing a set of Blackvue video files.', + parser.add_argument('--video_import_path', help='Path to a directory with one or more video files.', action='store', required=True) parser.add_argument('--video_sample_interval', help='Time interval for sampled video frames in seconds', default=2, type=float, required=False) diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index 530538a9..0111c313 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -30,7 +30,7 @@ def add_basic_arguments(self, parser): '--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False) # video specific args - parser.add_argument('--video_file', help='Provide the path to a video file or a directory containing a set of Blackvue video files.', + parser.add_argument('--video_import_path', help='Path to a directory with one or more video files.', action='store', default=None, required=False) parser.add_argument('--video_sample_interval', help='Time interval for sampled video frames in seconds', default=2, type=float, required=False) diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 8cb47e32..0c44389f 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -28,7 +28,7 @@ def add_basic_arguments(self, parser): parser.add_argument('--private', help="Specify whether the import is private", action='store_true', default=False, required=False) # video specific args - parser.add_argument('--video_file', help='Provide the path to a video file or a directory containing a set of Blackvue video files.', + parser.add_argument('--video_import_path', help='Path to a directory with one or more video files.', action='store', default=None, required=False) parser.add_argument('--video_sample_interval', help='Time interval for sampled video frames in seconds', default=2, type=float, required=False) diff --git a/mapillary_tools/process_video.py b/mapillary_tools/process_video.py index 946e83c5..a6ebf013 100644 --- a/mapillary_tools/process_video.py +++ b/mapillary_tools/process_video.py @@ -41,7 +41,7 @@ def timestamps_from_filename(video_filename, return capture_times -def sample_video(video_file, +def sample_video(video_import_path, import_path, video_sample_interval=2.0, video_start_time=None, @@ -54,44 +54,37 @@ def 
sample_video(video_file, sys.exit(1) # command specific checks - video_file = os.path.abspath(video_file) if ( - os.path.isfile(video_file) or os.path.isdir(video_file)) else None - if not video_file: - print("Error, video path " + video_file + " does not exist, exiting...") + video_import_path = os.path.abspath( + video_import_path) if os.path.isdir(video_import_path) else None + if not video_import_path: + print("Error, video import path " + video_import_path + + " does not exist or is not a directory, please provide a path to a directory with the video(s) you wish to import, exiting...") sys.exit(1) # set sampling path - video_sampling_path = processing.sampled_video_frames_rootpath(video_file) + video_sampling_path = processing.sampled_video_frames_rootpath( + video_import_path) import_path = os.path.join(os.path.abspath(import_path), video_sampling_path) if import_path else os.path.join( - os.path.dirname(video_file), video_sampling_path) + os.path.dirname(video_import_path), video_sampling_path) print("Video sampling path set to {}".format(import_path)) # check video logs - video_upload = processing.video_upload(video_file, import_path, verbose) + video_upload = processing.video_upload( + video_import_path, import_path, verbose) if video_upload: return - if os.path.isdir(video_file): - - video_list = uploader.get_video_file_list(video_file) - for video in video_list: - extract_frames(video, - import_path, - video_sample_interval, - video_start_time, - video_duration_ratio, - verbose) - else: - # single video file - extract_frames(video_file, + video_list = uploader.get_video_file_list(video_import_path) + for video in video_list: + extract_frames(video, import_path, video_sample_interval, video_start_time, video_duration_ratio, verbose) - processing.create_and_log_video_process(video_file, import_path) + processing.create_and_log_video_process(video_import_path, import_path) def extract_frames(video_file, From 356df74357e807b6d47babf1c276cde903c05375 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 15 Oct 2018 22:35:27 +0200 Subject: [PATCH 19/36] add: more info --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e05df2c0..295f825d 100644 --- a/README.md +++ b/README.md @@ -450,7 +450,7 @@ Process unit tools are tools executed by the `process` tool. Usage of process un ### Blackvue - - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. + - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. 
In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. ```bash mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 @@ -458,7 +458,7 @@ mapillary_tools video_process --import_path "path/to/images" --video_import_path ### GoPro - - Sample one or more GoPro videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.5 seconds, ie 2 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the GoPro videos in `path/to/videos` and specifying to derive camera direction based on `GPS`. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. + - Sample one or more GoPro videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.5 seconds, ie 2 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the GoPro videos in `path/to/videos` and specifying to derive camera direction based on `GPS`. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. ```bash mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gopro_video" --geotag_source_path "path/to/videos" --interpolate_directions --video_sample_interval 0.5 From 11bc29914e0636422970498f31c602fae70361ba Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Tue, 16 Oct 2018 23:39:23 +0200 Subject: [PATCH 20/36] add: download command --- mapillary_tools/commands/__init__.py | 4 +- mapillary_tools/commands/download.py | 24 ++++ mapillary_tools/download.py | 174 +++++++++++++++++++++++++++ mapillary_tools/post_process.py | 67 ++++++----- 4 files changed, 236 insertions(+), 33 deletions(-) create mode 100644 mapillary_tools/commands/download.py create mode 100644 mapillary_tools/download.py diff --git a/mapillary_tools/commands/__init__.py b/mapillary_tools/commands/__init__.py index 8c41d986..83fe2bd0 100644 --- a/mapillary_tools/commands/__init__.py +++ b/mapillary_tools/commands/__init__.py @@ -14,6 +14,7 @@ from . import authenticate from . import interpolate from . import post_process +from . 
import download mapillary_tools_advanced_commands = [ sample_video, @@ -28,7 +29,8 @@ process_csv, authenticate, interpolate, - post_process + post_process, + download ] mapillary_tools_commands = [ diff --git a/mapillary_tools/commands/download.py b/mapillary_tools/commands/download.py new file mode 100644 index 00000000..c846a885 --- /dev/null +++ b/mapillary_tools/commands/download.py @@ -0,0 +1,24 @@ +import inspect +from mapillary_tools.download import download + + +class Command: + name = 'download' + help = 'Helper tool : For a specified import path, download the blurred images from Mapillary.' + + def add_basic_arguments(self, parser): + parser.add_argument("--output_folder", + help="Output folder for the downloaded images.", required=False, default=".") + parser.add_argument("--user_name", help="user name", required=True) + + def add_advanced_arguments(self, parser): + parser.add_argument( + '--number_threads', help='Specify the number of download threads.', type=int, default=10, required=False) + + def run(self, args): + + vars_args = vars(args) + download(**({k: v for k, v in vars_args.iteritems() + if k in inspect.getargspec(download).args})) + + print("Download done.") diff --git a/mapillary_tools/download.py b/mapillary_tools/download.py new file mode 100644 index 00000000..5b83b938 --- /dev/null +++ b/mapillary_tools/download.py @@ -0,0 +1,174 @@ +import processing +import uploader +from post_process import save_local_mapping +from tqdm import tqdm +import os +import signal +import sys +import threading +import time +import urllib + + +class BlurDownloader(threading.Thread): + def __init__(self, lock, downloaded_images, rows, output_folder, token): + threading.Thread.__init__(self) + + self.lock = lock + self.downloaded_images = downloaded_images + self.rows = rows + self.output_folder = output_folder + self.token = token + self.shutdown_flag = threading.Event() + + def download_file(self, image_key, filename): + download_url = "https://a.mapillary.com/v3/images/{}/download_original_uuid?client_id={}&token={}".format( + image_key, uploader.CLIENT_ID, self.token) + response = urllib.urlopen(download_url) + + if response.status_code != 200: + print(response.json()) + return False + + with open(filename, "wb") as f: + total_length = response.headers.get('content-length') + + dl = 0 + total_length = int(total_length) + for data in response.iter_content(chunk_size=4096): + dl += len(data) + f.write(data) + done = int(50 * dl / total_length) + + return True + + def run(self): + while not self.shutdown_flag.is_set(): + self.lock.acquire() + if (self.downloaded_images["nbr"] >= len(self.rows)): + self.lock.release() + break + row_entry = self.rows[self.downloaded_images["nbr"]] + self.downloaded_images["nbr"] += 1 + image_path = os.path.join(self.output_folder, row_entry[0]) + image_uuid = row_entry[1] + + if not os.path.exists(os.path.dirname(image_path)): + os.makedirs(os.path.dirname(image_path)) + + self.lock.release() + + if not os.path.isfile(image_path): + success = self.download_file(image_uuid, image_path) + self.lock.acquire() + if success: + self.downloaded_images["success"] += 1 + else: + self.downloaded_images["failed"] += 1 + self.lock.release() + else: + self.lock.acquire() + downloaded_images["success"] += 1 + self.lock.release() + + self.lock.acquire() + total = len(self.rows) + count = self.downloaded_images["nbr"] + + suffix = "({}/{} DOWNLOADED, {}/{} STILL PROCESSING)".format( + self.downloaded_images["success"], + total, + self.downloaded_images["failed"], + 
total, + ) + + bar_len = 60 + filled_len = int(round(bar_len * count / float(total))) + percents = round(100.0 * count / float(total), 1) + bar = '=' * filled_len + '-' * (bar_len - filled_len) + sys.stdout.write('[%s] %s%s %s\r' % (bar, percents, '%', suffix)) + sys.stdout.flush() + self.lock.release() + + +class ServiceExit(Exception): + pass + + +def service_shutdown(signum, frame): + raise ServiceExit + + +def check_files_downloaded(local_mapping, output_folder, do_sleep): + not_downloaded = 0 + + for row in local_mapping: + if not os.path.isfile(os.path.join(output_folder, row[0])): + not_downloaded += 1 + + if not_downloaded > 0: + print("Trying to download {} not yet downloaded files".format(not_downloaded)) + if do_sleep: + print("Waiting 10 seconds before next try") + time.sleep(10) + + return False + else: + print("All files are downloaded") + return True + + +def download(import_path, user_name, output_folder=".", number_threads=10, verbose=False): + total_files = uploader.get_total_file_list(import_path) + rows = [] + + local_mapping = save_local_mapping(import_path) + + signal.signal(signal.SIGTERM, service_shutdown) + signal.signal(signal.SIGINT, service_shutdown) + + try: + user_properties = uploader.authenticate_user(user_name) + except: + print("Error, user authentication failed for user " + user_name) + print("Make sure your user credentials are correct, user authentication is required for images to be downloaded from Mapillary.") + return None + if "user_upload_token" in user_properties: + token = user_properties["user_upload_token"] + else: + print("Error, failed to obtain user token, please try again.") + return None + do_sleep = False + while not check_files_downloaded(local_mapping, output_folder, do_sleep): + do_sleep = True + + lock = threading.Lock() + + downloaded_images = { + "failed": 0, + "nbr": 0, + "success": 0, + } + + threads = [] + try: + for i in range(number_threads): + t = BlurDownloader(lock, downloaded_images, + local_mapping, output_folder, token) + threads.append(t) + t.start() + while True: + any_alive = False + for t in threads: + any_alive = (any_alive or t.is_alive()) + + if not any_alive: + break + + time.sleep(0.5) + except ServiceExit: + for t in threads: + t.shutdown_flag.set() + for t in threads: + t.join() + break diff --git a/mapillary_tools/post_process.py b/mapillary_tools/post_process.py index fda8fa6d..b4f914aa 100644 --- a/mapillary_tools/post_process.py +++ b/mapillary_tools/post_process.py @@ -9,6 +9,40 @@ import exif_read +def save_local_mapping(import_path): + local_mapping_filepath = os.path.join(os.path.dirname( + import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv") + + total_files = uploader.get_total_file_list(import_path) + + local_mapping = [] + for file in tqdm(total_files, desc="Reading image uuids"): + image_file_uuid = None + relative_path = file.lstrip(os.path.abspath(import_path)) + log_rootpath = uploader.log_rootpath(file) + image_description_json_path = os.path.join( + log_rootpath, "mapillary_image_description.json") + if os.path.isfile(image_description_json_path): + image_description_json = processing.load_json( + image_description_json_path) + if "MAPPhotoUUID" in image_description_json: + image_file_uuid = image_description_json["MAPPhotoUUID"] + else: + print( + "Error, photo uuid not in mapillary_image_description.json log file.") + else: + image_exif = exif_read.ExifRead(file) + image_description = json.loads( + image_exif.extract_image_description()) + if "MAPPhotoUUID" in 
image_description: + image_file_uuid = str(image_description["MAPPhotoUUID"]) + else: + print("Warning, image {} EXIF does not contain mapillary image description and mapillary_image_description.json log file does not exist. Try to process the image using mapillary_tools.".format(file)) + if image_file_uuid: + local_mapping.append((relative_path, image_file_uuid)) + return local_mapping + + def post_process(import_path, split_import_path=None, video_file=None, @@ -48,43 +82,12 @@ def post_process(import_path, " does not exist, exiting...") sys.exit(1) if save_local_mapping: - local_mapping_filepath = os.path.join(os.path.dirname( - import_path), import_path + "_mapillary_image_uuid_to_local_path_mapping.csv") - - total_files = uploader.get_total_file_list(import_path) - - local_mapping = [] - for file in total_files: - image_file_uuid = None - relative_path = file.lstrip(os.path.abspath(import_path)) - log_rootpath = uploader.log_rootpath(file) - image_description_json_path = os.path.join( - log_rootpath, "mapillary_image_description.json") - if os.path.isfile(image_description_json_path): - image_description_json = processing.load_json( - image_description_json_path) - if "MAPPhotoUUID" in image_description_json: - image_file_uuid = image_description_json["MAPPhotoUUID"] - else: - print( - "Error, photo uuid not in mapillary_image_description.json log file.") - else: - image_exif = exif_read.ExifRead(file) - image_description = json.loads( - image_exif.extract_image_description()) - if "MAPPhotoUUID" in image_description: - image_file_uuid = str(image_description["MAPPhotoUUID"]) - else: - print("Warning, image {} EXIF does not contain mapillary image description and mapillary_image_description.json log file does not exist. Try to process the image using mapillary_tools.".format(file)) - if image_file_uuid: - local_mapping.append((relative_path, image_file_uuid)) + local_mapping = save_local_mapping(import_path) with open(local_mapping_filepath, "w") as csvfile: csvwriter = csv.writer(csvfile, delimiter=",") for row in local_mapping: csvwriter.writerow(row) - else: - print("Reading import logs for import path {}...".format(import_path)) # collect logs From 89004e1529b5d72a3866fcf28e1ec4721df6935a Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Wed, 17 Oct 2018 21:38:27 +0200 Subject: [PATCH 21/36] add: fix response --- mapillary_tools/download.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mapillary_tools/download.py b/mapillary_tools/download.py index 5b83b938..89cff188 100644 --- a/mapillary_tools/download.py +++ b/mapillary_tools/download.py @@ -7,7 +7,7 @@ import sys import threading import time -import urllib +import requests class BlurDownloader(threading.Thread): @@ -24,7 +24,7 @@ def __init__(self, lock, downloaded_images, rows, output_folder, token): def download_file(self, image_key, filename): download_url = "https://a.mapillary.com/v3/images/{}/download_original_uuid?client_id={}&token={}".format( image_key, uploader.CLIENT_ID, self.token) - response = urllib.urlopen(download_url) + response = requests.get(download_url, stream=True) if response.status_code != 200: print(response.json()) @@ -68,7 +68,7 @@ def run(self): self.lock.release() else: self.lock.acquire() - downloaded_images["success"] += 1 + self.downloaded_images["success"] += 1 self.lock.release() self.lock.acquire() From ffbcf038e0277545ab93480fdcc7991aefc8eeb1 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Fri, 19 Oct 2018 11:59:27 +0200 Subject: [PATCH 22/36] add: changing arg 
setting --- mapillary_tools/commands/download.py | 2 +- mapillary_tools/download.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mapillary_tools/commands/download.py b/mapillary_tools/commands/download.py index c846a885..43aabbd6 100644 --- a/mapillary_tools/commands/download.py +++ b/mapillary_tools/commands/download.py @@ -8,7 +8,7 @@ class Command: def add_basic_arguments(self, parser): parser.add_argument("--output_folder", - help="Output folder for the downloaded images.", required=False, default=".") + help="Output folder for the downloaded images.", required=True) parser.add_argument("--user_name", help="user name", required=True) def add_advanced_arguments(self, parser): diff --git a/mapillary_tools/download.py b/mapillary_tools/download.py index 89cff188..dedae2bb 100644 --- a/mapillary_tools/download.py +++ b/mapillary_tools/download.py @@ -118,7 +118,7 @@ def check_files_downloaded(local_mapping, output_folder, do_sleep): return True -def download(import_path, user_name, output_folder=".", number_threads=10, verbose=False): +def download(import_path, user_name, output_folder, number_threads=10, verbose=False): total_files = uploader.get_total_file_list(import_path) rows = [] From 8a44647bc313788cc04ba43dfa13a9e00a39dca0 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 22 Oct 2018 23:54:33 +0200 Subject: [PATCH 23/36] add: change done file sending logic no more publishing step, no more manual_done arg, done file is sent per sequence --- README.md | 4 +- .../commands/process_and_upload.py | 2 - mapillary_tools/commands/upload.py | 2 - .../commands/video_process_and_upload.py | 2 - mapillary_tools/upload.py | 50 ++++++--------- mapillary_tools/uploader.py | 62 ++++++++++++------- 6 files changed, 61 insertions(+), 61 deletions(-) diff --git a/README.md b/README.md index 295f825d..3e6ba49a 100644 --- a/README.md +++ b/README.md @@ -326,10 +326,10 @@ By default, 4 threads upload in parallel and the script retries 10 times upon en mapillary_tools upload --import_path "path/to/images" ``` - - upload all images in the directory `path/to/images`, while skipping its sub directories and prompting the user to finalize the upload: + - upload all images in the directory `path/to/images`, while skipping its sub directories: ```bash -mapillary_tools upload --import_path "path/to/images" --skip_subfolders --manual_done +mapillary_tools upload --import_path "path/to/images" --skip_subfolders ``` This tool has no additional advanced arguments. 
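In outline, the upload flow introduced by this patch groups manually uploaded images by the sequence key stored in their upload parameters, uploads direct-upload images separately, and sends a DONE file per sequence once that sequence has finished uploading. The sketch below is illustrative only: `upload_images` and `send_done_file` are hypothetical stand-ins for the real helpers added in the diffs that follow (`uploader.upload_file_list_direct`, `uploader.upload_file_list_manual`, `upload_done_file`, `flag_finalization`).

```python
# Illustrative sketch of the per-sequence upload flow (not the actual helpers).
# Assumes each manually uploaded image has upload params that include the
# sequence "key" it belongs to, as read from upload_params_process.json.

def group_by_sequence(upload_params):
    """Group image paths by the sequence key found in their upload params."""
    per_sequence = {}
    for image, params in upload_params.items():
        per_sequence.setdefault(params["key"], []).append(image)
    return per_sequence


def upload_all(direct_images, upload_params, upload_images, send_done_file):
    """Upload direct-upload images first, then each sequence followed by its own DONE file."""
    if direct_images:
        upload_images(direct_images)        # direct uploads need no finalization
    for sequence_key, images in group_by_sequence(upload_params).items():
        upload_images(images)               # upload the whole sequence
        send_done_file(sequence_key)        # finalize only this sequence
```

Sending the DONE marker per sequence, instead of once at the end as in the removed `--manual_done` flow, should mean an interrupted run leaves at most the in-progress sequence unfinalized.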
diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index bfeb56c2..689d80fd 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -26,8 +26,6 @@ def add_basic_arguments(self, parser): '--organization_key', help="Specify organization key", default=None, required=False) parser.add_argument('--private', help="Specify whether the import is private", action='store_true', default=False, required=False) - parser.add_argument( - '--manual_done', help='Manually finalize the upload', action='store_true', default=False, required=False) parser.add_argument( '--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False) diff --git a/mapillary_tools/commands/upload.py b/mapillary_tools/commands/upload.py index 422ca309..9913e4f8 100644 --- a/mapillary_tools/commands/upload.py +++ b/mapillary_tools/commands/upload.py @@ -10,8 +10,6 @@ class Command: def add_basic_arguments(self, parser): # command specific args - parser.add_argument( - '--manual_done', help='Manually finalize the upload', action='store_true', default=False, required=False) parser.add_argument( '--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False) diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 0c44389f..554e9779 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -36,8 +36,6 @@ def add_basic_arguments(self, parser): help="Real time video duration ratio of the under or oversampled video duration.", type=float, default=1.0, required=False) parser.add_argument("--video_start_time", help="Video start time in epochs (milliseconds)", type=int, default=None, required=False) - parser.add_argument( - '--manual_done', help='Manually finalize the upload', action='store_true', default=False, required=False) parser.add_argument( '--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False) diff --git a/mapillary_tools/upload.py b/mapillary_tools/upload.py index 500c2969..a76db4ff 100755 --- a/mapillary_tools/upload.py +++ b/mapillary_tools/upload.py @@ -7,14 +7,13 @@ from exif_aux import verify_mapillary_tag -def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False, video_file=None, number_threads=None, max_attempts=None): +def upload(import_path, verbose=False, skip_subfolders=False, video_file=None, number_threads=None, max_attempts=None): ''' Upload local images to Mapillary Args: import_path: Directory path to where the images are stored. verbose: Print extra warnings and errors. skip_subfolders: Skip images stored in subdirectories. - manual_done: Prompt user to confirm upload finalization. Returns: Images are uploaded to Mapillary and flagged locally as uploaded. @@ -69,9 +68,12 @@ def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False, print('Please check if all images contain the required Mapillary metadata. 
If not, you can use "mapillary_tools process" to add them') sys.exit(1) - # get upload params + # get upload params for the manual upload images, group them per sequence + # and separate direct upload images params = {} - for image in total_file_list: + list_per_sequence_mapping = {} + direct_upload_file_list = [] + for image in upload_file_list: log_root = uploader.log_rootpath(image) upload_params_path = os.path.join( log_root, "upload_params_process.json") @@ -79,37 +81,25 @@ def upload(import_path, manual_done=False, verbose=False, skip_subfolders=False, with open(upload_params_path, "rb") as jf: params[image] = json.load( jf, object_hook=uploader.ascii_encode_dict) + sequence = params[image]["key"] + if sequence in list_per_sequence_mapping: + list_per_sequence_mapping[sequence].append(image) + else: + list_per_sequence_mapping[sequence] = [image] + else: + direct_upload_file_list.append(image) # inform how many images are to be uploaded and how many are being skipped # from upload + print("Uploading {} images with valid mapillary tags (Skipping {})".format( len(upload_file_list), len(total_file_list) - len(upload_file_list))) - # call the actual upload, passing the list of images, the root of the - # import and the upload params - uploader.upload_file_list(upload_file_list, params, - number_threads, max_attempts) - - # finalize manual uploads if necessary - finalize_file_list = uploader.get_finalize_file_list( - import_path, skip_subfolders) - - # if manual uploads a DONE file needs to be uploaded to let the harvester - # know the sequence is done uploading - if len(finalize_file_list): - finalize_all = 1 - if manual_done: - finalize_all = uploader.prompt_to_finalize("uploads") - if finalize_all: - # get the s3 locations of the sequences - finalize_params = uploader.process_upload_finalization( - finalize_file_list, params) - uploader.finalize_upload(finalize_params) - # flag finalization for each file - uploader.flag_finalization(finalize_file_list) - else: - print("Uploads will not be finalized.") - print("If you wish to finalize your uploads, run the upload tool again.") - sys.exit() + if len(direct_upload_file_list): + uploader.upload_file_list_direct( + direct_upload_file_list, number_threads, max_attempts) + for idx, sequence in enumerate(list_per_sequence_mapping): + uploader.upload_file_list_manual( + list_per_sequence_mapping[sequence], params, idx, number_threads, max_attempts) uploader.print_summary(upload_file_list) diff --git a/mapillary_tools/uploader.py b/mapillary_tools/uploader.py index 8b04ca5b..0f85715e 100644 --- a/mapillary_tools/uploader.py +++ b/mapillary_tools/uploader.py @@ -137,22 +137,6 @@ def prompt_to_finalize(subcommand): return 0 -def process_upload_finalization(file_list, params): - list_params = [] - keys = [] - for file in file_list: - if file in params: - if params[file]["key"] not in keys: - keys.append(params[file]["key"]) - list_params.append(params[file]) - return list_params - - -def finalize_upload(finalize_params): - for params in finalize_params: - upload_done_file(**params) - - def flag_finalization(finalize_file_list): for file in finalize_file_list: finalize_flag = os.path.join(log_rootpath(file), "upload_finalized") @@ -645,21 +629,18 @@ def ascii_encode(x): return x.encode('ascii') return dict(map(ascii_encode, pair) for pair in data.items()) -def upload_file_list(file_list, file_params={}, number_threads=None, max_attempts=None): - +def upload_file_list_direct(file_list, number_threads=None, max_attempts=None): # set some uploader 
params first if number_threads == None: number_threads = NUMBER_THREADS if max_attempts == None: max_attempts = MAX_ATTEMPTS - # create upload queue with all files + # create upload queue with all files per sequence + q = Queue() for filepath in file_list: - if filepath not in file_params: - q.put((filepath, max_attempts, UPLOAD_PARAMS)) - else: - q.put((filepath, max_attempts, file_params[filepath])) + q.put((filepath, max_attempts, UPLOAD_PARAMS)) # create uploader threads uploaders = [UploadThread(q) for i in range(number_threads)] @@ -681,6 +662,41 @@ def upload_file_list(file_list, file_params={}, number_threads=None, max_attempt sys.exit(1) +def upload_file_list_manual(file_list, file_params, sequence_idx, number_threads=None, max_attempts=None): + # set some uploader params first + if number_threads == None: + number_threads = NUMBER_THREADS + if max_attempts == None: + max_attempts = MAX_ATTEMPTS + + # create upload queue with all files per sequence + q = Queue() + for filepath in file_list: + q.put((filepath, max_attempts, file_params[filepath])) + # create uploader threads + uploaders = [UploadThread(q) for i in range(number_threads)] + + # start uploaders as daemon threads that can be stopped (ctrl-c) + try: + print("Uploading {}. sequence with {} threads".format( + sequence_idx + 1, number_threads)) + for uploader in uploaders: + uploader.daemon = True + uploader.start() + + for idx, uploader in enumerate(uploaders): + uploaders[idx].join(1) + + while q.unfinished_tasks: + time.sleep(1) + q.join() + except (KeyboardInterrupt, SystemExit): + print("\nBREAK: Stopping upload.") + sys.exit(1) + upload_done_file(**file_params[filepath]) + flag_finalization(file_list) + + def log_rootpath(filepath): return os.path.join(os.path.dirname(filepath), ".mapillary", "logs", os.path.splitext(os.path.basename(filepath))[0]) From 567c9774f83fcb284a593f2b0eb2d1352361e64c Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Tue, 23 Oct 2018 11:04:21 +0200 Subject: [PATCH 24/36] add: clarification in README regarding the account credentials --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3e6ba49a..7100f352 100644 --- a/README.md +++ b/README.md @@ -74,7 +74,7 @@ To install `ffmpeg` on Windows, follow these [instructions](http://adaptivesampl ### User Authentication -To upload images to Mapillary, an account is required and can be created [here](https://www.mapillary.com/signup). When using the upload tools for the first time, user authentication is required. You will be prompted to enter your account credentials. +To upload images to Mapillary, an account is required and can be created [here](https://www.mapillary.com/signup). When using the upload tools for the first time, user authentication is required. You will be prompted to enter your account credentials. Only Mapillary account credentials are valid for authentication in mapillary_tools, unlike in web uploads, where other account credentials can be used as well. 
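For context, the two upload helpers added above (`upload_file_list_direct` and `upload_file_list_manual`) follow a standard worker-queue pattern: files are put on a `Queue`, a fixed number of daemon threads drain it, and the main thread polls `unfinished_tasks` between short sleeps, which keeps Ctrl-C responsive. A generic, minimal sketch of that pattern (not the project's `UploadThread` implementation) might look like this:

```python
import threading
import time

try:
    from Queue import Queue      # Python 2, which these tools target
except ImportError:
    from queue import Queue      # Python 3


def run_workers(items, handle_item, number_threads=4):
    """Minimal sketch: drain `items` with daemon worker threads."""
    q = Queue()
    for item in items:
        q.put(item)

    def worker():
        while True:
            item = q.get()                  # blocks until an item is available
            try:
                handle_item(item)           # e.g. upload one file
            finally:
                q.task_done()

    for _ in range(number_threads):
        t = threading.Thread(target=worker)
        t.daemon = True                     # daemon threads die with the main thread
        t.start()

    # Poll instead of blocking on q.join() alone so KeyboardInterrupt is raised promptly.
    while q.unfinished_tasks:
        time.sleep(1)
    q.join()
```

Because the workers are daemons that simply keep blocking on `q.get()` once the queue is drained, the function can return as soon as all tasks are done.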
### Metadata From cd130f44fbeef792d41d10c214fa14161ddff0b8 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Wed, 24 Oct 2018 13:46:59 +0200 Subject: [PATCH 25/36] bug: spaces in the path --- mapillary_tools/process_video.py | 1 - 1 file changed, 1 deletion(-) diff --git a/mapillary_tools/process_video.py b/mapillary_tools/process_video.py index a6ebf013..bf8d8926 100644 --- a/mapillary_tools/process_video.py +++ b/mapillary_tools/process_video.py @@ -97,7 +97,6 @@ def extract_frames(video_file, if verbose: print('extracting frames from {}'.format(video_file)) - video_file = video_file.replace(" ", "\ ") video_filename = os.path.basename(video_file).rstrip(".mp4") command = [ From ff254e523ad8adb6ceb3f3ce55cc6fcb3a1ffe5b Mon Sep 17 00:00:00 2001 From: Ryan Cook Date: Fri, 26 Oct 2018 17:39:35 -0500 Subject: [PATCH 26/36] Update setup.py modify name of version on branch --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c6665261..0bbadf89 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup setup(name='mapillary_tools', - version='0.2.0', + version='0.3.0', description='Mapillary Image/Video Import Pipeline', url='https://github.com/mapillary/mapillary_tools', author='Mapillary', From abefa983da4ad7dfea815bceff99c67f6f5fefe9 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 29 Oct 2018 00:19:07 +0100 Subject: [PATCH 27/36] add: check for ffmpeg in video_process --- mapillary_tools/process_video.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/mapillary_tools/process_video.py b/mapillary_tools/process_video.py index bf8d8926..fa593f38 100644 --- a/mapillary_tools/process_video.py +++ b/mapillary_tools/process_video.py @@ -109,7 +109,15 @@ def extract_frames(video_file, command.append('{}/{}_%0{}d.jpg'.format(import_path, video_filename, ZERO_PADDING)) - subprocess.call(command) + try: + subprocess.call(command) + except OSError as e: + print("Error, ffmpeg is not installed or set in the OS system path.") + sys.exit(1) + except Exception as e: + print("Error, could not extract frames from video {} due to {}".format( + video_file, e)) + sys.exit(1) if video_start_time: video_start_time = datetime.datetime.utcfromtimestamp( From bc52c16d09b016e7080d4943a626cf0e26f759e2 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 29 Oct 2018 00:36:54 +0100 Subject: [PATCH 28/36] add: arg to ffmpeg to prevent reading from stdin --- mapillary_tools/process_video.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapillary_tools/process_video.py b/mapillary_tools/process_video.py index fa593f38..070f7f8e 100644 --- a/mapillary_tools/process_video.py +++ b/mapillary_tools/process_video.py @@ -104,7 +104,7 @@ def extract_frames(video_file, '-i', video_file, '-loglevel', 'quiet', '-vf', 'fps=1/{}'.format(video_sample_interval), - '-qscale', '1', + '-qscale', '1', '-nostdin' ] command.append('{}/{}_%0{}d.jpg'.format(import_path, From b49e65221700d9a43cc24120981cba02fcab45ad Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Mon, 29 Oct 2018 20:56:44 +0100 Subject: [PATCH 29/36] add: dont overwrite rest of exif tags unless arg is passed --- mapillary_tools/commands/exif_insert.py | 2 + mapillary_tools/commands/process.py | 2 + .../commands/process_and_upload.py | 3 ++ mapillary_tools/commands/video_process.py | 2 + .../commands/video_process_and_upload.py | 2 + mapillary_tools/insert_MAPJson.py | 6 ++- mapillary_tools/processing.py | 48 ++++++++++--------- 7 files changed, 
40 insertions(+), 25 deletions(-) diff --git a/mapillary_tools/commands/exif_insert.py b/mapillary_tools/commands/exif_insert.py index 38df75ec..391c90b4 100644 --- a/mapillary_tools/commands/exif_insert.py +++ b/mapillary_tools/commands/exif_insert.py @@ -20,6 +20,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.', action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) def run(self, args): diff --git a/mapillary_tools/commands/process.py b/mapillary_tools/commands/process.py index 1861c23c..35751bcd 100644 --- a/mapillary_tools/commands/process.py +++ b/mapillary_tools/commands/process.py @@ -101,6 +101,8 @@ def add_advanced_arguments(self, parser): '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) # add custom meta data in a form of a string consisting of a triplet # "name,type,value" diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index 689d80fd..8be8b9fd 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -106,6 +106,9 @@ def add_advanced_arguments(self, parser): parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) + # add custom meta data in a form of a string consisting of a triplet # "name,type,value" parser.add_argument('--custom_meta_data', help='Add custom meta data to all images. Required format of input is a string, consisting of the meta data name, type and value, separated by a comma for each entry, where entries are separated by semicolon. Supported types are long, double, string, boolean, date. 
Example for two meta data entries "random_name1,double,12.34;random_name2,long,1234"', diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index 0111c313..b1e6735d 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -95,6 +95,8 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) # post process parser.add_argument('--summarize', help='Summarize import for given import path.', action='store_true', default=False, required=False) diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 554e9779..643918e6 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -99,6 +99,8 @@ def add_advanced_arguments(self, parser): '--number_threads', help='Specify the number of upload threads.', type=int, default=None, required=False) parser.add_argument( '--max_attempts', help='Specify the maximum number of attempts to upload.', type=int, default=None, required=False) + parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. 
Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) # post process parser.add_argument('--summarize', help='Summarize import for given import path.', action='store_true', default=False, required=False) diff --git a/mapillary_tools/insert_MAPJson.py b/mapillary_tools/insert_MAPJson.py index 2c87bbdd..3dcb6ff2 100644 --- a/mapillary_tools/insert_MAPJson.py +++ b/mapillary_tools/insert_MAPJson.py @@ -14,7 +14,8 @@ def insert_MAPJson(import_path, skip_subfolders=False, skip_EXIF_insert=False, keep_original=False, - video_file=None): + video_file=None, + overwrite_EXIF_tags=False): # sanity check if video file is passed if video_file and not (os.path.isdir(video_file) or os.path.isfile(video_file)): @@ -61,7 +62,8 @@ def insert_MAPJson(import_path, master_upload, verbose, skip_EXIF_insert, - keep_original) + keep_original, + overwrite_EXIF_tags) processing.create_and_log_process(image, "mapillary_image_description", diff --git a/mapillary_tools/processing.py b/mapillary_tools/processing.py index 05407ba7..eadf974d 100644 --- a/mapillary_tools/processing.py +++ b/mapillary_tools/processing.py @@ -353,7 +353,8 @@ def get_upload_param_properties(log_root, image, user_name, user_upload_token, u if os.getenv("AWS_S3_ENDPOINT", None) is None: url = "https://s3-eu-west-1.amazonaws.com/mapillary.uploads.manual.images" else: - url = "{}/{}".format(os.getenv("AWS_S3_ENDPOINT"), "mtf-manual-uploads-images") + url = "{}/{}".format(os.getenv("AWS_S3_ENDPOINT"), + "mtf-manual-uploads-images") upload_params = { "url": url, @@ -376,7 +377,7 @@ def get_upload_param_properties(log_root, image, user_name, user_upload_token, u return upload_params -def get_final_mapillary_image_description(log_root, image, master_upload=False, verbose=False, skip_EXIF_insert=False, keep_original=False): +def get_final_mapillary_image_description(log_root, image, master_upload=False, verbose=False, skip_EXIF_insert=False, keep_original=False, overwrite_EXIF_tags=False): sub_commands = ["user_process", "geotag_process", "sequence_process", "upload_params_process", "settings_upload_hash", "import_meta_data_process"] @@ -445,27 +446,28 @@ def get_final_mapillary_image_description(log_root, image, master_upload=False, return None # also try to set time and gps so image can be placed on the map for testing and # qc purposes - try: - image_exif.add_date_time_original(datetime.datetime.strptime( - final_mapillary_image_description["MAPCaptureTime"], '%Y_%m_%d_%H_%M_%S_%f')) - except: - pass - try: - image_exif.add_lat_lon( - final_mapillary_image_description["MAPLatitude"], final_mapillary_image_description["MAPLongitude"]) - except: - pass - try: - image_exif.add_direction( - final_mapillary_image_description["MAPCompassHeading"]["TrueHeading"]) - except: - pass - try: - if "MAPOrientation" in final_mapillary_image_description: - image_exif.add_orientation( - final_mapillary_image_description["MAPOrientation"]) - except: - pass + if overwrite_EXIF_tags: + try: + image_exif.add_date_time_original(datetime.datetime.strptime( + final_mapillary_image_description["MAPCaptureTime"], '%Y_%m_%d_%H_%M_%S_%f')) + except: + pass + try: + image_exif.add_lat_lon( + final_mapillary_image_description["MAPLatitude"], final_mapillary_image_description["MAPLongitude"]) + except: + pass + try: + image_exif.add_direction( + final_mapillary_image_description["MAPCompassHeading"]["TrueHeading"]) + except: + pass + try: + if "MAPOrientation" 
in final_mapillary_image_description: + image_exif.add_orientation( + final_mapillary_image_description["MAPOrientation"]) + except: + pass filename = image filename_keep_original = processed_images_rootpath(image) if os.path.isfile(filename_keep_original): From bc9fee479cf56d32b843df75cc6abce83bd77aa7 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Tue, 30 Oct 2018 11:43:41 +0100 Subject: [PATCH 30/36] add: more options on overwriting EXIF tags --- mapillary_tools/commands/exif_insert.py | 10 ++++++- mapillary_tools/commands/process.py | 11 +++++-- .../commands/process_and_upload.py | 12 ++++++-- mapillary_tools/commands/video_process.py | 10 ++++++- .../commands/video_process_and_upload.py | 10 ++++++- mapillary_tools/insert_MAPJson.py | 12 ++++++-- mapillary_tools/processing.py | 30 +++++++++++++++++-- 7 files changed, 83 insertions(+), 12 deletions(-) diff --git a/mapillary_tools/commands/exif_insert.py b/mapillary_tools/commands/exif_insert.py index 391c90b4..5b00d58d 100644 --- a/mapillary_tools/commands/exif_insert.py +++ b/mapillary_tools/commands/exif_insert.py @@ -20,7 +20,15 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.', action='store_true', default=False, required=False) - parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + parser.add_argument('--overwrite_all_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_time_tag', help='Overwrite the capture time EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_gps_tag', help='Overwrite the gps EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_direction_tag', help='Overwrite the camera direction EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_orientation_tag', help='Overwrite the orientation EXIF tag with the value obtained in process.', action='store_true', default=False, required=False) def run(self, args): diff --git a/mapillary_tools/commands/process.py b/mapillary_tools/commands/process.py index 35751bcd..8e5420a5 100644 --- a/mapillary_tools/commands/process.py +++ b/mapillary_tools/commands/process.py @@ -101,9 +101,16 @@ def add_advanced_arguments(self, parser): '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', action='store_true', default=False, required=False) - parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. 
Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + parser.add_argument('--overwrite_all_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_time_tag', help='Overwrite the capture time EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_gps_tag', help='Overwrite the gps EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_direction_tag', help='Overwrite the camera direction EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_orientation_tag', help='Overwrite the orientation EXIF tag with the value obtained in process.', action='store_true', default=False, required=False) - # add custom meta data in a form of a string consisting of a triplet # "name,type,value" parser.add_argument('--custom_meta_data', help='Add custom meta data to all images. Required format of input is a string, consisting of the meta data name, type and value, separated by a comma for each entry, where entries are separated by semicolon. Supported types are long, double, string, boolean, date. Example for two meta data entries "random_name1,double,12.34;random_name2,long,1234"', diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index 8be8b9fd..f5ea8adc 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -105,10 +105,16 @@ def add_advanced_arguments(self, parser): '--split_import_path', help='If splitting the import path into duplicates, sequences, success and failed uploads, provide a path for the splits.', default=None, required=False) parser.add_argument('--save_local_mapping', help='Save the mapillary photo uuid to local file mapping in a csv.', action='store_true', default=False, required=False) - - parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + parser.add_argument('--overwrite_all_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. 
Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_time_tag', help='Overwrite the capture time EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_gps_tag', help='Overwrite the gps EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_direction_tag', help='Overwrite the camera direction EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_orientation_tag', help='Overwrite the orientation EXIF tag with the value obtained in process.', action='store_true', default=False, required=False) - # add custom meta data in a form of a string consisting of a triplet # "name,type,value" parser.add_argument('--custom_meta_data', help='Add custom meta data to all images. Required format of input is a string, consisting of the meta data name, type and value, separated by a comma for each entry, where entries are separated by semicolon. Supported types are long, double, string, boolean, date. Example for two meta data entries "random_name1,double,12.34;random_name2,long,1234"', diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index b1e6735d..378fb46c 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -95,7 +95,15 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) - parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + parser.add_argument('--overwrite_all_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. 
Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_time_tag', help='Overwrite the capture time EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_gps_tag', help='Overwrite the gps EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_direction_tag', help='Overwrite the camera direction EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_orientation_tag', help='Overwrite the orientation EXIF tag with the value obtained in process.', action='store_true', default=False, required=False) # post process parser.add_argument('--summarize', help='Summarize import for given import path.', diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 643918e6..8b287c50 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -99,7 +99,15 @@ def add_advanced_arguments(self, parser): '--number_threads', help='Specify the number of upload threads.', type=int, default=None, required=False) parser.add_argument( '--max_attempts', help='Specify the maximum number of attempts to upload.', type=int, default=None, required=False) - parser.add_argument('--overwrite_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + parser.add_argument('--overwrite_all_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. 
Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_time_tag', help='Overwrite the capture time EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_gps_tag', help='Overwrite the gps EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_direction_tag', help='Overwrite the camera direction EXIF tag with the value obtained in process.', + action='store_true', default=False, required=False) + parser.add_argument('--overwrite_EXIF_orientation_tag', help='Overwrite the orientation EXIF tag with the value obtained in process.', action='store_true', default=False, required=False) # post process parser.add_argument('--summarize', help='Summarize import for given import path.', diff --git a/mapillary_tools/insert_MAPJson.py b/mapillary_tools/insert_MAPJson.py index 3dcb6ff2..99826368 100644 --- a/mapillary_tools/insert_MAPJson.py +++ b/mapillary_tools/insert_MAPJson.py @@ -15,7 +15,11 @@ def insert_MAPJson(import_path, skip_EXIF_insert=False, keep_original=False, video_file=None, - overwrite_EXIF_tags=False): + overwrite_all_EXIF_tags=False, + overwrite_EXIF_time_tag=False, + overwrite_EXIF_gps_tag=False, + overwrite_EXIF_direction_tag=False, + overwrite_EXIF_orientation_tag=False): # sanity check if video file is passed if video_file and not (os.path.isdir(video_file) or os.path.isfile(video_file)): @@ -63,7 +67,11 @@ def insert_MAPJson(import_path, verbose, skip_EXIF_insert, keep_original, - overwrite_EXIF_tags) + overwrite_all_EXIF_tags, + overwrite_EXIF_time_tag, + overwrite_EXIF_gps_tag, + overwrite_EXIF_direction_tag, + overwrite_EXIF_orientation_tag) processing.create_and_log_process(image, "mapillary_image_description", diff --git a/mapillary_tools/processing.py b/mapillary_tools/processing.py index eadf974d..8ce5f03e 100644 --- a/mapillary_tools/processing.py +++ b/mapillary_tools/processing.py @@ -377,7 +377,7 @@ def get_upload_param_properties(log_root, image, user_name, user_upload_token, u return upload_params -def get_final_mapillary_image_description(log_root, image, master_upload=False, verbose=False, skip_EXIF_insert=False, keep_original=False, overwrite_EXIF_tags=False): +def get_final_mapillary_image_description(log_root, image, master_upload=False, verbose=False, skip_EXIF_insert=False, keep_original=False, overwrite_all_EXIF_tags=False, overwrite_EXIF_time_tag=False, overwrite_EXIF_gps_tag=False, overwrite_EXIF_direction_tag=False, overwrite_EXIF_orientation_tag=False): sub_commands = ["user_process", "geotag_process", "sequence_process", "upload_params_process", "settings_upload_hash", "import_meta_data_process"] @@ -446,7 +446,7 @@ def get_final_mapillary_image_description(log_root, image, master_upload=False, return None # also try to set time and gps so image can be placed on the map for testing and # qc purposes - if overwrite_EXIF_tags: + if overwrite_all_EXIF_tags: try: image_exif.add_date_time_original(datetime.datetime.strptime( final_mapillary_image_description["MAPCaptureTime"], '%Y_%m_%d_%H_%M_%S_%f')) @@ -468,6 +468,32 @@ def get_final_mapillary_image_description(log_root, image, master_upload=False, final_mapillary_image_description["MAPOrientation"]) except: pass + else: + if overwrite_EXIF_time_tag: + try: + 
image_exif.add_date_time_original(datetime.datetime.strptime( + final_mapillary_image_description["MAPCaptureTime"], '%Y_%m_%d_%H_%M_%S_%f')) + except: + pass + if overwrite_EXIF_gps_tag: + try: + image_exif.add_lat_lon( + final_mapillary_image_description["MAPLatitude"], final_mapillary_image_description["MAPLongitude"]) + except: + pass + if overwrite_EXIF_direction_tag: + try: + image_exif.add_direction( + final_mapillary_image_description["MAPCompassHeading"]["TrueHeading"]) + except: + pass + if overwrite_EXIF_orientation_tag: + try: + if "MAPOrientation" in final_mapillary_image_description: + image_exif.add_orientation( + final_mapillary_image_description["MAPOrientation"]) + except: + pass filename = image filename_keep_original = processed_images_rootpath(image) if os.path.isfile(filename_keep_original): From 4b2c47eba314cf1dd58c2b88d31c599140ecb169 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Tue, 30 Oct 2018 11:53:24 +0100 Subject: [PATCH 31/36] fix: help for sub process command --- mapillary_tools/commands/exif_insert.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapillary_tools/commands/exif_insert.py b/mapillary_tools/commands/exif_insert.py index 5b00d58d..4f0b2474 100644 --- a/mapillary_tools/commands/exif_insert.py +++ b/mapillary_tools/commands/exif_insert.py @@ -18,7 +18,7 @@ def add_advanced_arguments(self, parser): action='store_true', default=False, required=False) parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.', action='store_true', default=False, required=False) - parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.', + parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory called "processed_images" located in .mapillary in the import_path.', action='store_true', default=False, required=False) parser.add_argument('--overwrite_all_EXIF_tags', help='Overwrite the rest of the EXIF tags, whose values are changed during the processing. Default is False, which will result in the processed values to be inserted only in the EXIF Image Description tag.', action='store_true', default=False, required=False) From 74a6844c3b7c372e4fc1890fc37b9ddbbbc3bfbc Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Tue, 30 Oct 2018 12:10:49 +0100 Subject: [PATCH 32/36] add: note in readme to specify blackvue camera make and model --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7100f352..6703dfcf 100644 --- a/README.md +++ b/README.md @@ -450,10 +450,10 @@ Process unit tools are tools executed by the `process` tool. Usage of process un ### Blackvue - - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. 
In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. + - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying camera make and model, specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. We encourage users to check and specify camera make and model, since it helps with camera calibration and improves 3D reconstruction. ```bash -mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 +mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 --device_make "Blackvue" --device_model "DR900S-2CH" ``` ### GoPro From a84ea68a27b6513a9f0ef596029516da0bd2b95a Mon Sep 17 00:00:00 2001 From: Santiago Baldassin Date: Tue, 30 Oct 2018 09:48:29 -0300 Subject: [PATCH 33/36] fix: adding requests to the requirements --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 2a4fd89a..8cbbb62f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,4 @@ git+https://github.com/mapillary/Piexif redis certifi +requests From 244e5b48e39e3db5f9222cdcd39a0ae5021b0d08 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Wed, 31 Oct 2018 00:41:22 +0100 Subject: [PATCH 34/36] add: default device make when geotag source is blackvue --- mapillary_tools/commands/extract_geotag_data.py | 2 ++ mapillary_tools/commands/process.py | 2 ++ mapillary_tools/commands/process_and_upload.py | 4 +++- mapillary_tools/commands/video_process.py | 3 ++- mapillary_tools/commands/video_process_and_upload.py | 3 ++- 5 files changed, 11 insertions(+), 3 deletions(-) diff --git a/mapillary_tools/commands/extract_geotag_data.py b/mapillary_tools/commands/extract_geotag_data.py index bfe6a4a0..60480ff8 100644 --- a/mapillary_tools/commands/extract_geotag_data.py +++ b/mapillary_tools/commands/extract_geotag_data.py @@ -32,4 +32,6 @@ def add_advanced_arguments(self, parser): help="Use GPS trace starting time in case of derivating timestamp from filename.", action="store_true", default=False, required=False) def run(self, args): + if args.geotag_source == 'blackvue_videos': + args.device_make = "Blackvue" process_geotag_properties(**vars(args)) diff --git a/mapillary_tools/commands/process.py b/mapillary_tools/commands/process.py index 
8e5420a5..51cb839f 100644 --- a/mapillary_tools/commands/process.py +++ b/mapillary_tools/commands/process.py @@ -119,6 +119,8 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) + if args.geotag_source == 'blackvue_videos': + args.device_make = "Blackvue" process_user_properties(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_user_properties).args})) diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index f5ea8adc..5d881868 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -51,7 +51,7 @@ def add_advanced_arguments(self, parser): # geotagging parser.add_argument('--geotag_source', help='Provide the source of date/time and gps information needed for geotagging.', action='store', - choices=['exif', 'gpx', 'gopro_video', 'nmea'], default="exif", required=False) + choices=['exif', 'gpx', 'gopro_video', 'nmea', 'blackvue_videos'], default="exif", required=False) parser.add_argument( '--geotag_source_path', help='Provide the path to the file source of date/time and gps information needed for geotagging.', action='store', default=None, required=False) @@ -123,6 +123,8 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) + if args.geotag_source == 'blackvue_videos': + args.device_make = "Blackvue" process_user_properties(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_user_properties).args})) diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index 378fb46c..61925f03 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -129,7 +129,8 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) - + if args.geotag_source == 'blackvue_videos': + args.device_make = "Blackvue" sample_video(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(sample_video).args})) diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 8b287c50..4978553a 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -133,7 +133,8 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) - + if args.geotag_source == 'blackvue_videos': + args.device_make = "Blackvue" sample_video(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(sample_video).args})) From 78a4e9f432128c31fe91945028980de07c76dccc Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Wed, 31 Oct 2018 00:45:22 +0100 Subject: [PATCH 35/36] fix: only set default if arg is none --- mapillary_tools/commands/extract_geotag_data.py | 2 +- mapillary_tools/commands/process.py | 2 +- mapillary_tools/commands/process_and_upload.py | 2 +- mapillary_tools/commands/video_process.py | 2 +- mapillary_tools/commands/video_process_and_upload.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mapillary_tools/commands/extract_geotag_data.py b/mapillary_tools/commands/extract_geotag_data.py index 60480ff8..30854793 100644 --- a/mapillary_tools/commands/extract_geotag_data.py +++ b/mapillary_tools/commands/extract_geotag_data.py @@ -32,6 +32,6 @@ def add_advanced_arguments(self, parser): help="Use GPS trace starting time in case of derivating timestamp from filename.", 
action="store_true", default=False, required=False) def run(self, args): - if args.geotag_source == 'blackvue_videos': + if args.geotag_source == 'blackvue_videos' and not args.device_make: args.device_make = "Blackvue" process_geotag_properties(**vars(args)) diff --git a/mapillary_tools/commands/process.py b/mapillary_tools/commands/process.py index 51cb839f..cb27d637 100644 --- a/mapillary_tools/commands/process.py +++ b/mapillary_tools/commands/process.py @@ -119,7 +119,7 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) - if args.geotag_source == 'blackvue_videos': + if args.geotag_source == 'blackvue_videos' and not args.device_make: args.device_make = "Blackvue" process_user_properties(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_user_properties).args})) diff --git a/mapillary_tools/commands/process_and_upload.py b/mapillary_tools/commands/process_and_upload.py index 5d881868..55fce1d7 100644 --- a/mapillary_tools/commands/process_and_upload.py +++ b/mapillary_tools/commands/process_and_upload.py @@ -123,7 +123,7 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) - if args.geotag_source == 'blackvue_videos': + if args.geotag_source == 'blackvue_videos' and not args.device_make: args.device_make = "Blackvue" process_user_properties(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_user_properties).args})) diff --git a/mapillary_tools/commands/video_process.py b/mapillary_tools/commands/video_process.py index 61925f03..7477f694 100644 --- a/mapillary_tools/commands/video_process.py +++ b/mapillary_tools/commands/video_process.py @@ -129,7 +129,7 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) - if args.geotag_source == 'blackvue_videos': + if args.geotag_source == 'blackvue_videos' and not args.device_make: args.device_make = "Blackvue" sample_video(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(sample_video).args})) diff --git a/mapillary_tools/commands/video_process_and_upload.py b/mapillary_tools/commands/video_process_and_upload.py index 4978553a..4bf30930 100644 --- a/mapillary_tools/commands/video_process_and_upload.py +++ b/mapillary_tools/commands/video_process_and_upload.py @@ -133,7 +133,7 @@ def add_advanced_arguments(self, parser): def run(self, args): vars_args = vars(args) - if args.geotag_source == 'blackvue_videos': + if args.geotag_source == 'blackvue_videos' and not args.device_make: args.device_make = "Blackvue" sample_video(**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(sample_video).args})) From f70b0efbf09e453831212b431b2879fed58f22b1 Mon Sep 17 00:00:00 2001 From: jernejaMislej Date: Wed, 31 Oct 2018 23:54:39 +0100 Subject: [PATCH 36/36] release 0.3.0 --- CHANGELOG.MD | 14 +++ README.md | 104 +++++++++--------- mapillary_tools/commands/__init__.py | 2 +- .../process_import_meta_properties.py | 2 +- 4 files changed, 70 insertions(+), 52 deletions(-) diff --git a/CHANGELOG.MD b/CHANGELOG.MD index 5c9366a5..f9f1b08e 100644 --- a/CHANGELOG.MD +++ b/CHANGELOG.MD @@ -84,3 +84,17 @@ ### Bug fixes * Correct the last interpolated direction + +## 0.3.0 (2018-10-31) + +### Features and improvements +* Enable specification of a time offset in case of already geotagged images +* Add better progress bars and information output in process +* Support Blackvue videos with embedded gps data +* Add a simple `download` command to download all the blurred imaged 
from Mapillary for a certain `import_path` +* Support import of multiple videos + +### Breaking changes +* Argument `--video_file` was renamed to `--video_import_path` as directories of one or more videos can now be processed and uploaded. Note that even if one video is to be processed and uploaded, the directory of the video should be specified as the video import path and not the video file itself. +* Only the Image Description EXIF tag is overwritten with the mapillary image description which includes all the data obtained during the `process` command. If one would like the rest of the tags to be overwritten, an additional argument needs to be passed under advanced usage. Single specific tags can also be overwritten by additional specific corresponding arguments. + diff --git a/README.md b/README.md index 6703dfcf..daefa8f1 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ Mapillary tools is a library for processing and uploading images to Mapillary. * [Requirements](#requirements) * [Usage](#usage) * [Advanced Usage](#advanced-usage) - * [Tool Specifications](#tool-specifications) + * [Command Specifications](#command-specifications) * [Misc](#misc) @@ -74,7 +74,7 @@ To install `ffmpeg` on Windows, follow these [instructions](http://adaptivesampl ### User Authentication -To upload images to Mapillary, an account is required and can be created [here](https://www.mapillary.com/signup). When using the upload tools for the first time, user authentication is required. You will be prompted to enter your account credentials. Only Mapilary account credentials are valid for authentication in mapillary_tools, unlike in web uploads, where other account credentials can be used as well. +To upload images to Mapillary, an account is required and can be created [here](https://www.mapillary.com/signup). When using the tools for the first time, user authentication is required. You will be prompted to enter your account credentials. Only Mapilary account credentials are valid for authentication in mapillary_tools, unlike in web uploads, where other account credentials can be used as well. ### Metadata @@ -86,7 +86,7 @@ To upload videos to Mapillary, videos are required to be sampled into images and ## Usage -Upload tools can be used with the executable `mapillary_tools`, located in `mapillary_tools/mapillary_tools/bin`. +All commands are executed with `mapillary_tools`, located in `mapillary_tools/mapillary_tools/bin`. ### Execution Running the executable `mapillary_tools` is slightly different on Unix and Windows OS. 
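For reference, the selective EXIF-overwrite behaviour introduced in the patches above and summarised in the changelog can be condensed as follows. This is an illustrative sketch only, not part of the patch series: the `ExifEdit` method names (`add_date_time_original`, `add_lat_lon`, `add_direction`, `add_orientation`) and the `MAP*` description keys are taken from the hunks above, while the helper function itself and the key-presence checks (which stand in for the patch's per-tag `try`/`except` guards) are hypothetical.

```python
# Illustrative sketch only (not part of the patch series): how the per-tag
# overwrite flags gate the EXIF writes performed after the mapillary image
# description has been assembled. Method names come from the hunks above;
# the helper and the key checks are hypothetical.
import datetime


def overwrite_exif_tags(image_exif, description,
                        overwrite_all_EXIF_tags=False,
                        overwrite_EXIF_time_tag=False,
                        overwrite_EXIF_gps_tag=False,
                        overwrite_EXIF_direction_tag=False,
                        overwrite_EXIF_orientation_tag=False):
    # --overwrite_all_EXIF_tags behaves like passing every per-tag flag
    if overwrite_all_EXIF_tags:
        overwrite_EXIF_time_tag = True
        overwrite_EXIF_gps_tag = True
        overwrite_EXIF_direction_tag = True
        overwrite_EXIF_orientation_tag = True

    if overwrite_EXIF_time_tag and "MAPCaptureTime" in description:
        image_exif.add_date_time_original(datetime.datetime.strptime(
            description["MAPCaptureTime"], "%Y_%m_%d_%H_%M_%S_%f"))
    if overwrite_EXIF_gps_tag and "MAPLatitude" in description:
        image_exif.add_lat_lon(
            description["MAPLatitude"], description["MAPLongitude"])
    if overwrite_EXIF_direction_tag and "MAPCompassHeading" in description:
        image_exif.add_direction(
            description["MAPCompassHeading"]["TrueHeading"])
    if overwrite_EXIF_orientation_tag and "MAPOrientation" in description:
        image_exif.add_orientation(description["MAPOrientation"])
```

In this reading, passing `--overwrite_all_EXIF_tags` is equivalent to passing all four per-tag flags at once, while the default leaves every tag except the Image Description untouched.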
@@ -108,9 +108,9 @@ On Ubuntu and MacOSX the executable is available in the PATH after installation -### Available tools +### Available commands -To see the available tools, use the following in the command line(for Windows, adjust the command according the instructions for execution): +To see the available commands, use the following in the command line(for Windows, adjust the command according the instructions for execution): ``` mapillary_tools -h @@ -119,22 +119,22 @@ Executable `mapillary_tools` takes the following arguments: `-h, --help`: Show help and exit -`--advanced`: Use the tools under an advanced level, with additional arguments and tools available +`--advanced`: Use the tools under an advanced level, with additional arguments and commands available -`tool`: Use one of the available tools: +`command`: Use one of the available commands: - `process`: Process the images including for instance, geotagging and sequence arrangement - `upload`: Upload images to Mapillary -- `process_and_upload`: A bundled tool for `process` and `upload` +- `process_and_upload`: A bundled command for `process` and `upload` -See the tool specific help for required and optional arguments: +See the command specific help for required and optional arguments: - - Show help for `process` tool: + - Show help for `process` command: ```bash mapillary_tools process -h ``` - - Show advanced help for `process` tool: + - Show advanced help for `process` command: ```bash mapillary_tools process -h --advanced @@ -146,14 +146,14 @@ mapillary_tools process -h --advanced For Windows, adjust the commands according the instructions for execution. #### Process Images -The command below processes all images in the directory and its sub-directories. It will update the images with Mapillary-specific metadata in the image EXIF for the user with user name `mapillary_user`. It requires that each image in the directory contains `capture time` and `GPS`. +The command below processes all images in the directory and its sub-directories. It will update the images with Mapillary-specific metadata in the image EXIF for the user with user name `mapillary_user`. It requires that each image in the directory contains `capture time` and `GPS`. By default, only the Image Description EXIF tag is overwritten. ```bash mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" ``` #### Upload Images -The command below uploads all images in a directory and its sub-directories. It requires Mapillary-specific metadata in the image EXIF. It works for images that are captured with Mapillary iOS or Android apps or processed with the `process` tool. +The command below uploads all images in a directory and its sub-directories. It requires Mapillary-specific metadata in the image EXIF. It works for images that are captured with Mapillary iOS or Android apps or processed with the `process` command. 
```bash mapillary_tools upload --import_path "path/to/images" @@ -170,27 +170,28 @@ mapillary_tools process_and_upload --import_path "path/to/images" --user_name "m ## Advanced Usage -Available tools for advanced usage: -- Video Specific Tools: +Available commands for advanced usage: +- Video Specific Commands: - sample_video - video_process - video_process_and_upload -- Process Unit Tools: +- Process Unit Commands: - extract_user_data - extract_import_meta_data - extract_geotag_data - extract_sequence_data - extract_upload_params - exif_insert -- Other Tools: +- Other Commands: - process_csv - interpolate - authenticate - post_process + - download ### Geotag and Upload - - Run process and upload consecutively, while process is reading geotag data from a gpx track. It requires that `capture time` information is embedded in the image EXIF. You can use + - Run process and upload consecutively, while process is reading geotag data from a gpx track. It requires that `capture time` information is embedded in the image EXIF. By default geotag data is stored only in the mapillary image description, in the EXIF Image Description tag. If you would like the rest of the tags to be overwritten as well, for example to be able to place images on the map for testing purposes, you should pass an additional argument `--overwrite_all_EXIF_tags` to overwrite all EXIF tags, or in case you only want to overwrite a specific tag, like for example the GPS tag, pass argument `--overwrite_EXIF_gps_tag`. ```bash mapillary_tools process --advanced --import_path "path/to/images" --user_name username_at_mapillary --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" @@ -238,21 +239,21 @@ mapillary_tools process_and_upload --advanced --import_path "path/to/images" --u - Sample the video(s) located in `path/to/videos` into the directory `path/to/images`, at a sample interval of 0.5 seconds and tag the sampled images with `capture time`. Note that the video frames will always be sampled into sub directory `.mapillary/sampled_video_frames/"video_import_path"`, whether import path is specified or not. In case `import_path` is specified the final path for the sampled video frames will be `"import path"/.mapillary/sampled_video_frames/"video_import_path"` and in case `import_path` is not specified, the final path for the sampled video frames will be `path/to/.mapillary/sampled_video_frames/"video_import_path"`. ```bash -mapillary_tools sample_video --import_path "path/to/images" --video_import_path "path/to/videos" --video_sample_interval 0.5 --advanced +mapillary_tools sample_video --import_path "path/to/images" --video_import_path "path/to/videos" --video_sample_interval 0.5 --advanced ``` - - Sample the video(s) located in `path/to/videos`, at a sample interval of 2 seconds (default value) and tag the resulting images with `capture time`. And then process and upload the resulting images for user `username_at_mapillary`, specifying a gpx track to be the source of geotag data. + - Sample the video(s) located in `path/to/videos`, at a sample interval of 2 seconds (default value) and tag the resulting images with `capture time`. And then process and upload the resulting images for user `username_at_mapillary`, specifying a gpx track to be the source of geotag data. Additionally pass the `--overwrite_all_EXIF_tags` so the extracted frames have all the tags set beside the Image Description tag. 
```bash mapillary_tools sample_video --video_import_path "path/to/videos" --advanced -mapillary_tools process --advanced --import_path "path/to/.mapillary/sampled_video_frames/video_import_path" --user_name "username_at_mapillary" --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" +mapillary_tools process --advanced --import_path "path/to/.mapillary/sampled_video_frames/video_import_path" --user_name "username_at_mapillary" --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --overwrite_all_EXIF_tags mapillary_tools upload --import_path "path/to/.mapillary/sampled_video_frames/video_import_path" ``` or ```bash -mapillary_tools video_process_and_upload --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" +mapillary_tools video_process_and_upload --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --overwrite_all_EXIF_tags ``` ### Process csv @@ -268,12 +269,12 @@ mapillary_tools process_csv --import_path "path/to/images" --csv_path "path/to/c mapillary_tools process_csv --import_path "path/to/images" --csv_path "path/to/csv_file" --timestamp_column 1 --meta_columns "6,7" --meta_names "random_name1,random_name2" --meta_types "double,string" --advanced ``` -## Tool Specifications +## Command Specifications ### `process` -The `process` tool will format the required and optional meta data into a Mapillary image description and insert it in the image EXIF. Images are required to contain image capture time, latitude, longitude and camera direction in the image EXIF. Under advanced usage, additional functionalities are available, for example latitude and longitude can be read from a gpx track file or a GoPro video, camera direction can be derived based on latitude and longitude, duplicates can be flagged to be excluded from the upload etc. See the tool specific help for required and optional arguments, add `--advanced` to see additional advanced optional arguments. +The `process` command will format the required and optional meta data into a Mapillary image description and insert it in the image EXIF Image Description tag. Images are required to contain image capture time, latitude, longitude and camera direction in the image EXIF. Under advanced usage, additional functionalities are available, for example latitude and longitude can be read from a gpx track file or a GoPro video, camera direction can be derived based on latitude and longitude, duplicates can be flagged to be excluded from the upload etc. See the command specific help for required and optional arguments, add `--advanced` to see additional advanced optional arguments. #### Examples @@ -290,30 +291,30 @@ mapillary_tools process --import_path "path/to/images" --user_name "mapillary_us #### Advanced Examples - - Process all images for user `mapillary_user`, in the directory `path/to/images` and its sub-directories, reading geotag data from a gpx track stored in file `path/to/gpx_file`, specifying an offset of 2 seconds between the camera and gps device, ie, camera is 2 seconds ahead of the gps device and flagging images as duplicates in case they are apart by equal or less then the default 0.1 m and differ by the camera angle by equal or less than the default 5°. 
+ - Process all images for user `mapillary_user`, in the directory `path/to/images` and its sub-directories, reading geotag data from a gpx track stored in file `path/to/gpx_file`, specifying an offset of 2 seconds between the camera and gps device, ie, camera is 2 seconds ahead of the gps device and flagging images as duplicates in case they are apart by equal or less then the default 0.1 m and differ by the camera angle by equal or less than the default 5°. Additionally pass the `--overwrite_EXIF_gps_tag` to overwrite values with the values obtained from the gpx track. ```bash -mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --offset_time 2 --flag_duplicates +mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --offset_time 2 --flag_duplicates --overwrite_EXIF_gps_tag ``` - Process all images for user `mapillary_user`, in the directory `path/to/images` and its sub-directories, specifying the import to be private imagery belonging to an organization with organization username `mapillary_organization`. You can find the organization username in your dashboard. ```bash mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" --advanced --private --organization_username "mapillary_organization" ``` - - Process all images for user `mapillary_user`, in the directory `path/to/images` and its sub-directories, specifying an angle offset of 90° for the camera direction and splitting images into sequences of images apart by less than 100 meters according to image `GPS` and less than 120 seconds according to image `capture time`. + - Process all images for user `mapillary_user`, in the directory `path/to/images` and its sub-directories, specifying an angle offset of 90° for the camera direction and splitting images into sequences of images apart by less than 100 meters according to image `GPS` and less than 120 seconds according to image `capture time`. Additionally pass the `--overwrite_EXIF_direction_tag` to overwrite values with the additional specified offset. ```bash -mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" --advanced --offset_angle 90 --cutoff_distance 100 --cutoff_time 120 +mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" --advanced --offset_angle 90 --cutoff_distance 100 --cutoff_time 120 --overwrite_EXIF_direction_tag ``` ### `upload` -Images that have been successfully processed or were taken with the Mapillary app will contain the required Mapillary image description embedded in the image EXIF and can be uploaded with the `upload` tool. +Images that have been successfully processed or were taken with the Mapillary app will contain the required Mapillary image description embedded in the image EXIF and can be uploaded with the `upload` command. -The `upload` tool will collect all the images in the import path, while checking for duplicate flags, processing and uploading logs. +The `upload` command will collect all the images in the import path, while checking for duplicate flags, processing and uploading logs. If image is flagged as duplicate, was logged with failed process or logged as successfully uploaded, it will not be added to the upload list. -By default, 4 threads upload in parallel and the script retries 10 times upon encountering a failure. 
These can be customized with environment variables in the command line: +By default, 5 threads upload in parallel and the script retries 10 times upon encountering a failure. These can be customized by specifying additional arguments `--number_threads` and `--max_attempts` under `--advanced` usage or with environment variables in the command line: NUMBER_THREADS=2 MAX_ATTEMPTS=100 @@ -326,17 +327,15 @@ By default, 4 threads upload in parallel and the script retries 10 times upon en mapillary_tools upload --import_path "path/to/images" ``` - - upload all images in the directory `path/to/images`, while skipping its sub directories: + - upload all images in the directory `path/to/images`, while skipping its sub directories and specifying to upload with 10 threads and 10 maximum attempts: ```bash -mapillary_tools upload --import_path "path/to/images" --skip_subfolders +mapillary_tools upload --import_path "path/to/images" --skip_subfolders --number_threads 10 --max_attempts 10 --advanced ``` -This tool has no additional advanced arguments. - ### `process_and_upload` -`process_and_upload` tool will run `process` and `upload` tools consecutively with combined required and optional arguments. +`process_and_upload` command will run `process` and `upload` commands consecutively with combined required and optional arguments. #### Examples @@ -356,7 +355,7 @@ mapillary_tools process_and_upload --import_path "path/to/images" --user_name "m ### `sample_video` -`sample_video` tool will sample a video into images and insert `capture time` to the image EXIF. +`sample_video` command will sample a video into images and insert `capture time` to the image EXIF. Capture time is calculated based on the `video start time` and sampling interval. Video start time can either be extracted from the video metadata or passed as an argument `--video_start_time` (milliseconds since UNIX epoch). @@ -376,7 +375,7 @@ mapillary_tools sample_video --import_path "path/to/images" --video_import_path ### `video_process` -`video_process` tool will run `video_sample` and `process` tools consecutively with combined required and optional arguments. +`video_process` command will run `video_sample` and `process` commands consecutively with combined required and optional arguments. #### Examples @@ -388,7 +387,7 @@ mapillary_tools video_process --import_path "path/to/images" --video_import_path ### `video_process_and_upload` -`video_process_and_upload` tool will run `video_sample`, `process` and `upload` tools consecutively with combined required and optional arguments. +`video_process_and_upload` command will run `video_sample`, `process` and `upload` commands consecutively with combined required and optional arguments. #### Examples @@ -398,9 +397,9 @@ mapillary_tools video_process --import_path "path/to/images" --video_import_path mapillary_tools video_process_and_upload --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/gpx_file" --video_sample_interval 1 --interpolate_directions ``` -### Process Unit Tools +### Process Unit Commands -Process unit tools are tools executed by the `process` tool. Usage of process unit tools requires the flag `--advanced` to be passed and might require some experience with the upload tools. +Process unit commands are commands executed by the `process` command. Usage of process unit commands requires the flag `--advanced` to be passed and might require some experience with mapillary_tools. 
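As a side note on the upload concurrency settings described earlier in this README diff, the sketch below shows one plausible way the `--number_threads`/`--max_attempts` arguments and the `NUMBER_THREADS`/`MAX_ATTEMPTS` environment variables could be reconciled. The precedence order (CLI argument over environment variable over the documented defaults of 5 threads and 10 attempts) is an assumption made for illustration, not taken from the patches, and `resolve_upload_settings` is a hypothetical helper.

```python
# Hypothetical sketch (not part of the patch series): resolving upload
# concurrency settings, assuming CLI arguments win over the NUMBER_THREADS /
# MAX_ATTEMPTS environment variables named in the README, which in turn win
# over the documented defaults.
import os


def resolve_upload_settings(number_threads=None, max_attempts=None):
    if number_threads is None:
        number_threads = int(os.getenv("NUMBER_THREADS", "5"))
    if max_attempts is None:
        max_attempts = int(os.getenv("MAX_ATTEMPTS", "10"))
    return number_threads, max_attempts
```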
#### `extract_user_data` @@ -424,10 +423,10 @@ Process unit tools are tools executed by the `process` tool. Usage of process un #### `exif_insert` -`exif_insert` will take all the meta data read and processed in the other processing unit tools and insert it in the image EXIF. +`exif_insert` will take all the meta data read and processed in the other processing unit commands and insert it in the image EXIF tag Image Description only, unless additional arguments are passed in order to overwrite the rest of EXIF tags as well. -### Other Tools +### Other Commands #### `authenticate` @@ -446,22 +445,26 @@ Process unit tools are tools executed by the `process` tool. Usage of process un `post_process` provides functionalities to help summarize and organize the results of the `process` and/or `upload` commands. +#### `download` + +`download` will download all blurred images from Mapillary for a certain `import_path`. + ## Camera specific ### Blackvue - - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying camera make and model, specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. We encourage users to check and specify camera make and model, since it helps with camera calibration and improves 3D reconstruction. + - Sample one or more Blackvue videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.2 seconds, ie 5 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the Blackvue videos in `path/to/videos` and specifying camera make and model, specifying to derive camera direction based on `GPS` and use the `GPS` start time. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. We encourage users to check and specify camera make and model, since it helps with camera calibration and improves 3D reconstruction. If you want to check the video frame placement on the map before uploading, specify `--overwrite_EXIF_gps_tag`. 
```bash -mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 --device_make "Blackvue" --device_model "DR900S-2CH" +mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "blackvue_videos" --geotag_source_path "path/to/videos" --use_gps_start_time --interpolate_directions --video_sample_interval 0.2 --device_make "Blackvue" --device_model "DR900S-2CH" --overwrite_EXIF_gps_tag ``` ### GoPro - - Sample one or more GoPro videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.5 seconds, ie 2 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the GoPro videos in `path/to/videos` and specifying to derive camera direction based on `GPS`. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. + - Sample one or more GoPro videos in directory `path/to/videos` into import path `path/to/images` at a sampling rate 0.5 seconds, ie 2 frames every second and process resulting video frames for user `mapillary_user`, reading geotag data from the GoPro videos in `path/to/videos` and specifying to derive camera direction based on `GPS`. Note that video frames will be sampled into `path/to/images/.mapillary/sampled_video_frames/"video_import_path"`. Video frames will be geotagged after all the videos in the specified `video_import_path` have been sampled. In case video frames geotagging requires `rerun`, there is no need to rerun the entire `video_process` command, in case video frame extraction was successful, rerunning only the `process` command for the given `import_path` is sufficient. If you want to check the video frame placement on the map before uploading, specify `--overwrite_EXIF_gps_tag`. ```bash -mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gopro_video" --geotag_source_path "path/to/videos" --interpolate_directions --video_sample_interval 0.5 +mapillary_tools video_process --import_path "path/to/images" --video_import_path "path/to/videos" --user_name "mapillary_user" --advanced --geotag_source "gopro_video" --geotag_source_path "path/to/videos" --interpolate_directions --video_sample_interval 0.5 --overwrite_EXIF_gps_tag ``` ## Troubleshooting @@ -469,7 +472,7 @@ mapillary_tools video_process --import_path "path/to/images" --video_import_path In case of any issues with the installation and usage of `mapillary_tools`, check this section in case it has already been addressed, otherwise, open an issue on Github. #### General - - In case of any issues, it is always safe to try and rerun the failing command while specifying `--verbose` to see more information printed out. Uploaded images should not get uploaded more than once and should not be processed after uploading. 
@@ -495,7 +498,7 @@ then in vim editor:

 #### Run time issues
  - HTTP Errors can occur due to poor network connection or high load on the import pipeline. In most cases the images eventually get uploaded regardless. But in some cases HTTP Errors can occur due to authentication issues, which can be resolved by either removing the config file with the user's credentials, located in `~/.config/mapillary/config`, or running the `authenticate` command available under advanced usage of `mapillary_tools`.
- - Windows users sometimes have issues with the prompt not functioning. This usually results in the tools just hanging without printing anything or creating any logs in `{image_path}/.mapillary/logs/{image_name}`. In such cases authentication should be run separately with the `authentication` command, while passing `user_name`, `user_email` and `user_password` as command line arguments. This will avoid the prompt and will authenticate the user for all further usage of the `process` command.
+ - Windows users sometimes have issues with the prompt not functioning. This usually results in mapillary_tools just hanging without printing anything or creating any logs in `{image_path}/.mapillary/logs/{image_name}`. In such cases authentication should be run separately with the `authenticate` command, while passing `user_name`, `user_email` and `user_password` as command line arguments. This avoids the prompt and authenticates the user for all further usage of the `process` command.
  - Missing required data is often the reason for failed uploads, especially if the processing included parsing external data like a gps trace. Images are aligned with a gps trace based on the image capture time and gps time, where the default assumption is that both are in UTC. Check the begin and end date of your capture and the begin and end date of the gps trace to make sure that the image capture time is in the scope of the gps trace. To correct any offset between the two capture times, you can specify `--offset_time "offset time"`. Timezone differences can result in such issues; if you know that the image capture time was stored in your current local timezone, while the gps trace is stored in UTC, specify `--local_time`. If images do not contain capture time or the capture time is unreliable, while gps time is accurate, specify `--use_gps_start_time`.
  - In cases where the `import_path` is located on an external mount, images can potentially get overwritten if the script is interrupted with Ctrl+c. To keep the images intact, you can specify `--keep_original` and all the processed data will be inserted in a copy of the original image. We are still working on improving this step of the data import and will make sure that no image gets overwritten at any point.

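For example, a minimal sketch of running authentication separately, as suggested above for Windows prompt issues; the flag spelling `--user_name`, `--user_email` and `--user_password` is assumed from the argument names mentioned in the bullet, and the credentials are placeholders:

```bash
# Authenticate once without the interactive prompt so that subsequent
# process runs can reuse the stored credentials.
mapillary_tools authenticate --user_name "mapillary_user" --user_email "user@example.com" --user_password "user_password" --advanced
```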
@@ -510,7 +513,8 @@ then in vim editor:
 ```bash
 mapillary_tools interpolate --data "identical_timestamps" --import_path "path/to/images" --advanced
 ```
-
+ - If `process` includes correction of existing EXIF tag values or extraction of missing EXIF tag values from external sources, and you want to test the placement on the map before uploading the images, make sure you pass `--advanced --overwrite_all_EXIF_tags` so that the rest of the tags, besides the Image Description tag, also get updated with the values obtained during `process`.
+
 ## Misc

 ### Download
diff --git a/mapillary_tools/commands/__init__.py b/mapillary_tools/commands/__init__.py
index 83fe2bd0..9111bf24 100644
--- a/mapillary_tools/commands/__init__.py
+++ b/mapillary_tools/commands/__init__.py
@@ -39,7 +39,7 @@
     process_and_upload
 ]

-VERSION = "0.2.0"
+VERSION = "0.3.0"


 def add_general_arguments(parser, command):
diff --git a/mapillary_tools/process_import_meta_properties.py b/mapillary_tools/process_import_meta_properties.py
index 203e6025..2a6b5252 100644
--- a/mapillary_tools/process_import_meta_properties.py
+++ b/mapillary_tools/process_import_meta_properties.py
@@ -103,7 +103,7 @@ def finalize_import_properties_process(image,
     add_meta_tag(mapillary_description,
                  "strings",
                  "mapillary_tools_version",
-                 "0.2.0")
+                 "0.3.0")

     if custom_meta_data:
         parse_and_add_custom_meta_tags(mapillary_description,
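Relating to the run time note above on aligning image capture time with a gps trace, a minimal sketch of correcting a known time offset during `process`; it assumes that `gpx` is an accepted `--geotag_source` value and that `--offset_time` is given in seconds, with the trace path as a placeholder:

```bash
# Geotag from an external gps trace, treating image capture time as local time
# and correcting a known 2 second offset between the images and the trace.
mapillary_tools process --import_path "path/to/images" --user_name "mapillary_user" --advanced --geotag_source "gpx" --geotag_source_path "path/to/trace.gpx" --local_time --offset_time 2.0
```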