Compare commits

15 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 795ef36da5 | |
| | f5faa70bc8 | |
| | 81462677a7 | |
| | 6d040b4ba6 | |
| | f21331889c | |
| | 501ab496ff | |
| | ed9fd7cc21 | |
| | 16e62cd451 | |
| | 3baa184a17 | |
| | dde2f4f66f | |
| | 22e87223a3 | |
| | 34e58d6a0f | |
| | e0509cdb57 | |
| | b571ba6468 | |
| | f8ae11c57f | |
@@ -3,6 +3,7 @@
 /build/
 /.coverage
+/diagnostics.lua
 docs/_build
 docs/Ordigi_data_scheme.odg
@@ -0,0 +1,119 @@
+<map version="freeplane 1.7.0">
+<!--To view this file, download free mind mapping software Freeplane from http://freeplane.sourceforge.net -->
+<node TEXT="elodie" FOLDED="false" ID="ID_577640973" CREATED="1624709002278" MODIFIED="1624709019473" STYLE="oval"><hook NAME="MapStyle">
+<conditional_styles>
+<conditional_style ACTIVE="true" LOCALIZED_STYLE_REF="styles.connection" LAST="false">
+<node_periodic_level_condition PERIOD="2" REMAINDER="1"/>
+</conditional_style>
+<conditional_style ACTIVE="true" LOCALIZED_STYLE_REF="styles.topic" LAST="false">
+<node_level_condition VALUE="2" MATCH_CASE="false" MATCH_APPROXIMATELY="false" COMPARATION_RESULT="0" SUCCEED="true"/>
+</conditional_style>
+<conditional_style ACTIVE="true" LOCALIZED_STYLE_REF="styles.subtopic" LAST="false">
+<node_level_condition VALUE="4" MATCH_CASE="false" MATCH_APPROXIMATELY="false" COMPARATION_RESULT="0" SUCCEED="true"/>
+</conditional_style>
+<conditional_style ACTIVE="true" LOCALIZED_STYLE_REF="styles.subsubtopic" LAST="false">
+<node_level_condition VALUE="6" MATCH_CASE="false" MATCH_APPROXIMATELY="false" COMPARATION_RESULT="0" SUCCEED="true"/>
+</conditional_style>
+</conditional_styles>
+<properties edgeColorConfiguration="#808080ff,#ff0000ff,#0000ffff,#00ff00ff,#ff00ffff,#00ffffff,#7c0000ff,#00007cff,#007c00ff,#7c007cff,#007c7cff,#7c7c00ff" fit_to_viewport="false" show_note_icons="true"/>
+
+<map_styles>
+<stylenode LOCALIZED_TEXT="styles.root_node" STYLE="oval" UNIFORM_SHAPE="true" VGAP_QUANTITY="24.0 pt">
+<font SIZE="24"/>
+<stylenode LOCALIZED_TEXT="styles.predefined" POSITION="right" STYLE="bubble">
+<stylenode LOCALIZED_TEXT="default" ICON_SIZE="12.0 pt" COLOR="#000000" STYLE="fork">
+<font NAME="Arial" SIZE="10" BOLD="false" ITALIC="false"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="defaultstyle.details"/>
+<stylenode LOCALIZED_TEXT="defaultstyle.attributes">
+<font SIZE="9"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="defaultstyle.note" COLOR="#000000" BACKGROUND_COLOR="#ffffff" TEXT_ALIGN="LEFT"/>
+<stylenode LOCALIZED_TEXT="defaultstyle.floating">
+<edge STYLE="hide_edge"/>
+<cloud COLOR="#f0f0f0" SHAPE="ROUND_RECT"/>
+</stylenode>
+</stylenode>
+<stylenode LOCALIZED_TEXT="styles.user-defined" POSITION="right" STYLE="bubble">
+<stylenode LOCALIZED_TEXT="styles.topic" COLOR="#18898b" STYLE="fork">
+<font NAME="Liberation Sans" SIZE="10" BOLD="true"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="styles.subtopic" COLOR="#cc3300" STYLE="fork">
+<font NAME="Liberation Sans" SIZE="10" BOLD="true"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="styles.subsubtopic" COLOR="#669900">
+<font NAME="Liberation Sans" SIZE="10" BOLD="true"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="styles.connection" COLOR="#606060" STYLE="fork">
+<font NAME="Arial" SIZE="8" BOLD="false"/>
+</stylenode>
+</stylenode>
+<stylenode LOCALIZED_TEXT="styles.AutomaticLayout" POSITION="right" STYLE="bubble">
+<stylenode LOCALIZED_TEXT="AutomaticLayout.level.root" COLOR="#000000" STYLE="oval">
+<font SIZE="18"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="AutomaticLayout.level,1" COLOR="#0033ff">
+<font SIZE="16"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="AutomaticLayout.level,2" COLOR="#00b439">
+<font SIZE="14"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="AutomaticLayout.level,3" COLOR="#990000">
+<font SIZE="12"/>
+</stylenode>
+<stylenode LOCALIZED_TEXT="AutomaticLayout.level,4" COLOR="#111111">
+<font SIZE="10"/>
+</stylenode>
+</stylenode>
+</stylenode>
+</stylenode>
+</map_styles>
+</hook>
+<node TEXT="import" POSITION="right" ID="ID_1958811617" CREATED="1624709031603" MODIFIED="1624710428698"><richcontent TYPE="NOTE">
+
+<html>
+  <head>
+
+  </head>
+  <body>
+    <p>
+      Import from external source
+    </p>
+  </body>
+</html>
+
+</richcontent>
+<node TEXT="--update" ID="ID_1408411362" CREATED="1624710635676" MODIFIED="1624710643751"/>
+</node>
+<node TEXT="update" POSITION="right" ID="ID_200299843" CREATED="1624709041259" MODIFIED="1624710451112"><richcontent TYPE="NOTE">
+
+<html>
+  <head>
+
+  </head>
+  <body>
+    <p>
+      Update metadata
+    </p>
+  </body>
+</html>
+
+</richcontent>
+</node>
+<node TEXT="sort" FOLDED="true" POSITION="right" ID="ID_474160274" CREATED="1624709213958" MODIFIED="1624710465196"><richcontent TYPE="NOTE">
+
+<html>
+  <head>
+
+  </head>
+  <body>
+    <p>
+      Sort photo
+    </p>
+  </body>
+</html>
+
+</richcontent>
+<node TEXT="sort files" ID="ID_1215066925" CREATED="1624709364728" MODIFIED="1624709367203"/>
+</node>
+</node>
+</map>
@@ -0,0 +1,241 @@
+
+# Name ideas
+dozo
+fog
+mtool
+ordigi
+
+# Geocoders
+- Pelias
+- Photon
+- Nominatium
+
+# TEST
+
+def get_exif(filename):
+    image = Image.open(filename)
+    image.verify()
+    return image._getexif()
+
+def get_geotagging(exif):
+    if not exif:
+        raise ValueError("No EXIF metadata found")
+
+    geotagging = {}
+    for (idx, tag) in TAGS.items():
+        if tag == 'GPSInfo':
+            if idx not in exif:
+                raise ValueError("No EXIF geotagging found")
+
+            for (key, val) in GPSTAGS.items():
+                if key in exif[idx]:
+                    geotagging[val] = exif[idx][key]
+
+    return geotagging
+get_geotagging(exif)
+from PIL.ExifTags import TAGS
+
+def get_labeled_exif(exif):
+    labeled = {}
+    for (key, val) in exif.items():
+        labeled[TAGS.get(key)] = val
+
+    return labeled
+get_geotagging(exif)
+from PIL.ExifTags import GPSTAGS
+get_geotagging(exif)
+geotags = get_geotagging(exif)
+get_location(geotags)
+
+def get_decimal_from_dms(dms, ref):
+
+    degrees = dms[0][0] / dms[0][1]
+    minutes = dms[1][0] / dms[1][1] / 60.0
+    seconds = dms[2][0] / dms[2][1] / 3600.0
+
+    if ref in ['S', 'W']:
+        degrees = -degrees
+        minutes = -minutes
+        seconds = -seconds
+
+    return round(degrees + minutes + seconds, 5)
+
+def get_coordinates(geotags):
+    lat = get_decimal_from_dms(geotags['GPSLatitude'], geotags['GPSLatitudeRef'])
+
+    lon = get_decimal_from_dms(geotags['GPSLongitude'], geotags['GPSLongitudeRef'])
+
+    return (lat,lon)
+
+def get_geotagging(exif):
+    if not exif:
+        raise ValueError("No EXIF metadata found")
+
+    geotagging = {}
+    for (idx, tag) in TAGS.items():
+        if tag == 'GPSInfo':
+            if idx not in exif:
+                raise ValueError("No EXIF geotagging found")
+
+            for (key, val) in GPSTAGS.items():
+                if key in exif[idx]:
+                    geotagging[val] = exif[idx][key]
+
+    return geotagging
+
+def get_decimal_from_dms(dms, ref):
+
+    degrees = dms[0]
+    minutes = dms[1] / 60.0
+    seconds = dms[2] / 3600.0
+
+    if ref in ['S', 'W']:
+        degrees = -degrees
+        minutes = -minutes
+        seconds = -seconds
+
+    return round(degrees + minutes + seconds, 5)
+headers = {}
+params = {
+    'apiKey': os.environ['API_KEY'],
+    'at': "%s,%s" % coords,
+    'lang': 'en-US',
+    'limit': 1,
+}
+78/41: headers = {}
+78/42:
+params = {
+    'apiKey': os.environ['API_KEY'],
+    'at': "%s,%s" % coords,
+    'lang': 'en-US',
+    'limit': 1,
+}
+78/43:
+params = {
+    'apiKey': os.environ['API_KEY'],
+    'at': "%s,%s" % coords,
+    'lang': 'en-US',
+    'limit': 1,
+}
+78/44: API_KEY=m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe
+78/45: API_KEY='m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe'
+78/46:
+params = {
+    'apiKey': os.environ['API_KEY'],
+    'at': "%s,%s" % coords,
+    'lang': 'en-US',
+    'limit': 1,
+}
+78/47: API_KEY='m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe'
+78/48:
+params = {
+    'apiKey': os.environ['API_KEY'],
+    'at': "%s,%s" % coords,
+    'lang': 'en-US',
+    'limit': 1,
+}
+78/49:
+params = {
+    'apiKey': os.environ['m5aGo8xGe4LLhxeKZYpHr2MPXGN2aDhe'],
+    'at': "%s,%s" % coords,
+    'lang': 'en-US',
+    'limit': 1,
+}
+78/50: %load_ext autotime
+78/51:
+import pandas as pd
+import geopandas as gpd
+import geopy
+from geopy.geocoders import Nominatim
+from geopy.extra.rate_limiter import RateLimiterimport matplotlib.pyplot as plt
+import plotly_express as pximport tqdm
+from tqdm._tqdm_notebook import tqdm_notebook
+78/52:
+import pandas as pd
+import geopandas as gpd
+import geopy
+from geopy.geocoders import Nominatim
+from geopy.extra.rate_limiter import RateLimiterimport matplotlib.pyplot as plt
+import plotly_express as px
+import pandas as pd
+import geopandas as gpd
+
+
+
+
+
+
+from PIL import Image
+
+filename='2021-02-24_09-33-29-20210305_081001_01.mp4'
+def get_exif(filename):
+    image = Image.open(filename)
+    image.verify()
+    return image._getexif()
+exif=get_exif(filename)
+
+from PIL.ExifTags import TAGS
+from PIL.ExifTags import GPSTAGS
+def get_geotagging(exif):
+    if not exif:
+        raise ValueError("No EXIF metadata found")
+
+    geotagging = {}
+    for (idx, tag) in TAGS.items():
+        if tag == 'GPSInfo':
+            if idx not in exif:
+                raise ValueError("No EXIF geotagging found")
+
+            for (key, val) in GPSTAGS.items():
+                if key in exif[idx]:
+                    geotagging[val] = exif[idx][key]
+
+    return geotagging
+geotags = get_geotagging(exif)
+import os
+import requests
+
+def get_location(geotags):
+    coords = get_coordinates(geotags)
+
+    uri = 'https://revgeocode.search.hereapi.com/v1/revgeocode'
+    headers = {}
+    params = {
+        'apiKey': os.environ['API_KEY'],
+        'at': "%s,%s" % coords,
+        'lang': 'en-US',
+        'limit': 1,
+    }
+
+    response = requests.get(uri, headers=headers, params=params)
+    try:
+        response.raise_for_status()
+        return response.json()
+
+    except requests.exceptions.HTTPError as e:
+        print(str(e))
+        return {}
+
+def get_coordinates(geotags):
+    lat = get_decimal_from_dms(geotags['GPSLatitude'], geotags['GPSLatitudeRef'])
+
+    lon = get_decimal_from_dms(geotags['GPSLongitude'], geotags['GPSLongitudeRef'])
+
+    return (lat,lon)
+coords = get_coordinates(geotags)
+import geopy
+from geopy.geocoders import Nominatim
+locator = Nominatim(user_agent='myGeocoder')
+# coordinates ='53.480837, -2.244914'
+lat='45.58339'
+lon='4.79823'
+coords = lat + ',' + lon
+locator.reverse(coords)
+location =locator.reverse(coords)
+location.address.split(',')
+city=location.address.split(',')[1].strip()
+country=location.address.split(',')[7].strip()
+location.raw
+rint
+country=location.raw['address']['country']
+city=location.raw['address']['village']
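The two `get_decimal_from_dms` variants in the notes above differ only in how the DMS values arrive: the first takes EXIF rationals as (numerator, denominator) pairs, the second plain numbers. A minimal worked check of the rational variant (the values are illustrative, chosen to reproduce the latitude `45.58339` used at the end of the notes, not taken from a real photo):

```python
dms = ((45, 1), (35, 1), (51, 250))  # EXIF rationals: (numerator, denominator)
degrees = dms[0][0] / dms[0][1]               # 45.0
minutes = dms[1][0] / dms[1][1] / 60.0        # 0.58333...
seconds = dms[2][0] / dms[2][1] / 3600.0      # 51/250 = 0.204 arcseconds -> 0.0000567
print(round(degrees + minutes + seconds, 5))  # 45.58339
```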
ordigi.conf (18 changes)

@@ -1,5 +1,17 @@
+[Exif]
+#album_from_folder=False
+fill_date_original=True
+#cache=True
+#ignore_tags=None
+use_date_filename=True
+#use_file_dates=False
+
 [Filters]
 exclude=["**/.directory", "**/.DS_Store"]
+#extensions=None
+#glob=**/*
+#max_deep=None
+remove_duplicates=True
+
 [Geolocation]
 geocoder=Nominatim

@@ -15,5 +27,9 @@ day_begins=4

 # Path format
 dirs_path=<%Y>/<%m-%b>_<location>_<folder>
-name=<%Y%m%d-%H%M%S>_<<original_name>|<name>>.%l<ext>
+name=<%Y%m%d-%H%M%S>_<<name>.%l<ext>|<original_name>>
 # name=<%Y%m%d-%H%M%S>-%u<original_name>.%l<ext>
+
+[Terminal]
+dry_run=False
+interactive=False
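For orientation, and purely as an assumption about how the placeholders expand (the strftime codes suggest it, but ordigi's documentation is not part of this diff): a file dated 2021-02-24 09:33:29, geolocated to Lyon, inside a source folder named `holiday`, would land under something like `2021/02-Feb_Lyon_holiday/20210224-093329_...`.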
@@ -5,8 +5,9 @@ import sys

 import click

-from ordigi import constants, log, LOG
+from ordigi import log, LOG
 from ordigi.collection import Collection
+from ordigi import constants
 from ordigi.geolocation import GeoLocation
 from ordigi import utils

@@ -85,7 +86,7 @@ _sort_options = [
     click.option(
         '--path-format',
         '-p',
-        default=None,
+        default=constants.DEFAULT_PATH_FORMAT,
         help='Custom featured path format',
     ),
     click.option(

@@ -147,15 +148,10 @@ def _cli_get_location(collection):
 )


-def _cli_sort(collection, src_paths, import_mode, remove_duplicates):
+def _cli_sort(collection, src_paths, import_mode):
     loc = _cli_get_location(collection)
-
-    path_format = collection.opt['Path']['path_format']
-    LOG.debug(f'path_format: {path_format}')
-
-    return collection.sort_files(
-        src_paths, path_format, loc, import_mode, remove_duplicates
-    )
+    return collection.sort_files(src_paths, loc, import_mode)


 @click.group()

@@ -182,6 +178,7 @@ def _check(**kwargs):
     if log_level < 30:
         summary.print()
     if summary.errors:
+        LOG.error('Db data is not accurate run `ordigi update --checksum`')
         sys.exit(1)
     else:
         LOG.error('Db data is not accurate run `ordigi update`')

@@ -218,7 +215,7 @@ def _check(**kwargs):
 @click.argument('subdirs', required=False, nargs=-1, type=click.Path())
 @click.argument('collection', required=True, nargs=1, type=click.Path())
 def _clean(**kwargs):
-    """Remove empty folders"""
+    """Clean media collection"""

     folders = kwargs['folders']
     log_level = log.get_level(kwargs['verbose'])

@@ -231,24 +228,21 @@ def _clean(**kwargs):
     collection = Collection(
         root,
         {
-            "dry_run": kwargs['dry_run'],
-            "extensions": kwargs['ext'],
-            "glob": kwargs['glob'],
+            'dry_run': kwargs['dry_run'],
+            'extensions': kwargs['ext'],
+            'glob': kwargs['glob'],
+            'remove_duplicates': kwargs['remove_duplicates'],
         },
     )

     # os.path.join(
     # TODO make function to remove duplicates
     # path_format = collection.opt['Path']['path_format']
-    # summary = collection.sort_files(
-    #     paths, path_format, None, remove_duplicates=kwargs['remove_duplicates']
-    # )
+    # summary = collection.sort_files(paths, None)

     if kwargs['path_string']:
         dedup_regex = set(kwargs['dedup_regex'])
-        collection.dedup_path(
-            paths, dedup_regex, kwargs['remove_duplicates']
-        )
+        collection.dedup_path(paths, dedup_regex)

     for path in paths:
         if folders:

@@ -334,9 +328,10 @@ def _compare(**kwargs):
     collection = Collection(
         root,
         {
-            "extensions": kwargs['ext'],
-            "glob": kwargs['glob'],
-            "dry_run": kwargs['dry_run'],
+            'extensions': kwargs['ext'],
+            'glob': kwargs['glob'],
+            'dry_run': kwargs['dry_run'],
+            'remove_duplicates': kwargs['remove_duplicates'],
         },
     )

@@ -509,7 +504,7 @@ def _import(**kwargs):
             'dry_run': kwargs['dry_run'],
             'interactive': kwargs['interactive'],
             'path_format': kwargs['path_format'],
+            'remove_duplicates': kwargs['remove_duplicates'],
         }
     )

@@ -517,7 +512,7 @@ def _import(**kwargs):
         import_mode = 'copy'
     else:
         import_mode = 'move'
-    summary = _cli_sort(collection, src_paths, import_mode, kwargs['remove_duplicates'])
+    summary = _cli_sort(collection, src_paths, import_mode)

     if log_level < 30:
         summary.print()

@@ -566,10 +561,11 @@ def _sort(**kwargs):
             'glob': kwargs['glob'],
             'dry_run': kwargs['dry_run'],
             'interactive': kwargs['interactive'],
+            'remove_duplicates': kwargs['remove_duplicates'],
         }
     )

-    summary = _cli_sort(collection, paths, False, kwargs['remove_duplicates'])
+    summary = _cli_sort(collection, paths, False)

     if kwargs['clean']:
         collection.remove_empty_folders(root)

@@ -583,6 +579,13 @@ def _sort(**kwargs):

 @cli.command('update')
 @add_options(_logger_options)
+@click.option(
+    '--checksum',
+    '-c',
+    default=False,
+    is_flag=True,
+    help='Update checksum, assuming file are changed by the user',
+)
 @click.argument('path', required=True, nargs=1, type=click.Path())
 def _update(**kwargs):
     """

@@ -594,7 +597,7 @@ def _update(**kwargs):

     collection = Collection(root)
     loc = _cli_get_location(collection)
-    summary = collection.update(loc)
+    summary = collection.update(loc, kwargs['checksum'])

     if log_level < 30:
         summary.print()
@@ -138,8 +138,9 @@ class FPath:
         elif item == 'name':
             # Remove date prefix added to the name.
             part = stem
-            for regex in utils.get_date_regex().values():
-                part = re.sub(regex, '', part)
+            date_filename, regex, sep = utils.get_date_from_string(stem)
+            if date_filename:
+                part = re.sub(regex, sep, part)
             # Delete separator
             if re.search('^[-_ .]', part):
                 part = part[1:]

@@ -272,7 +273,7 @@ class CollectionDb:
     def __init__(self, root):
         self.sqlite = Sqlite(root)

-    def _set_row_data(self, table, metadata):
+    def _get_row_data(self, table, metadata):
         row_data = {}
         for title in self.sqlite.tables[table]['header']:
             key = utils.camel2snake(title)

@@ -283,11 +284,11 @@ class CollectionDb:
     def add_file_data(self, metadata):
         """Save metadata informations to db"""
         if metadata['latitude'] and metadata['longitude']:
-            loc_values = self._set_row_data('location', metadata)
+            loc_values = self._get_row_data('location', metadata)
             metadata['location_id'] = self.sqlite.upsert_location(loc_values)

         if metadata['file_path']:
-            row_data = self._set_row_data('metadata', metadata)
+            row_data = self._get_row_data('metadata', metadata)
             self.sqlite.upsert_metadata(row_data)

@@ -359,7 +360,7 @@ class Paths:
         :return: Path path
         """
         if not path.exists():
-            self.log.error(f'Directory {path} does not exist')
+            self.log.error(f'Path {path} does not exist')
             sys.exit(1)

         return path

@@ -476,6 +477,7 @@ class SortMedias:
         db=None,
         dry_run=False,
         interactive=False,
+        remove_duplicates=False,
     ):

         # Arguments

@@ -488,6 +490,7 @@ class SortMedias:
         self.dry_run = dry_run
         self.interactive = interactive
         self.log = LOG.getChild(self.__class__.__name__)
+        self.remove_duplicates = remove_duplicates
         self.summary = Summary(self.root)

         # Attributes

@@ -519,7 +522,7 @@ class SortMedias:

         # change media file_path to dest_path
         if not self.dry_run:
-            updated = self.medias.update_exif_data(metadata)
+            updated = self.medias.update_exif_data(metadata, imp)
             if updated:
                 checksum = utils.checksum(dest_path)
                 metadata['checksum'] = checksum

@@ -599,7 +602,7 @@ class SortMedias:
             directory_path.mkdir(parents=True, exist_ok=True)
             self.log.info(f'Create {directory_path}')

-    def check_conflicts(self, src_path, dest_path, remove_duplicates=False):
+    def check_conflicts(self, src_path, dest_path):
         """
         Check if file can be copied or moved file to dest_path.
         """

@@ -615,7 +618,7 @@ class SortMedias:

         if dest_path.is_file():
             self.log.info(f"File {dest_path} already exist")
-            if remove_duplicates:
+            if self.remove_duplicates:
                 if filecmp.cmp(src_path, dest_path):
                     self.log.info(
                         "File in source and destination are identical. Duplicate will be ignored."

@@ -632,15 +635,15 @@ class SortMedias:

         return 0

-    def _solve_conflicts(self, conflicts, remove_duplicates):
+    def _solve_conflicts(self, conflicts):
         unresolved_conflicts = []
         while conflicts != []:
             src_path, dest_path, metadata = conflicts.pop()
             # Check for conflict status again in case is has changed
-            conflict = self.check_conflicts(src_path, dest_path, remove_duplicates)
+            conflict = self.check_conflicts(src_path, dest_path)

-            for i in range(1, 100):
+            for i in range(1, 1000):
                 if conflict != 1:
                     break

@@ -651,7 +654,7 @@ class SortMedias:
                 else:
                     stem = dest_path.stem
                 dest_path = dest_path.parent / (stem + '_' + str(i) + suffix)
-                conflict = self.check_conflicts(src_path, dest_path, remove_duplicates)
+                conflict = self.check_conflicts(src_path, dest_path)

             if conflict == 1:
                 # i = 100:

@@ -662,7 +665,7 @@ class SortMedias:

             yield (src_path, dest_path, metadata), conflict

-    def sort_medias(self, imp=False, remove_duplicates=False):
+    def sort_medias(self, imp=False):
         """
         sort files and solve conflicts
         """

@@ -673,7 +676,7 @@ class SortMedias:
         for src_path, metadata in self.medias.datas.items():
             dest_path = self.root / metadata['file_path']

-            conflict = self.check_conflicts(src_path, dest_path, remove_duplicates)
+            conflict = self.check_conflicts(src_path, dest_path)

             if not conflict:
                 self.sort_file(

@@ -691,9 +694,7 @@ class SortMedias:
                 pass

         if conflicts != []:
-            for files_data, conflict in self._solve_conflicts(
-                conflicts, remove_duplicates
-            ):
+            for files_data, conflict in self._solve_conflicts(conflicts):

                 src_path, dest_path, metadata = files_data
                 if not conflict:

@@ -726,13 +727,13 @@ class Collection(SortMedias):
         self.log = LOG.getChild(self.__class__.__name__)

         # Get config options
-        self.opt = self.get_config_options()
+        self.opt, default_options = self.get_config_options()

         # Set client options
         for option, value in cli_options.items():
-            if value not in (None, set()):
-                for section in self.opt:
-                    if option in self.opt[section]:
+            for section in self.opt:
+                if option in self.opt[section]:
+                    if value != default_options[section][option]:
                         if option == 'exclude':
                             self.opt[section][option].union(set(value))
                         elif option in ('ignore_tags', 'extensions'):

@@ -771,6 +772,7 @@ class Collection(SortMedias):
             self.db,
             self.opt['Terminal']['dry_run'],
             self.opt['Terminal']['interactive'],
+            self.opt['Filters']['remove_duplicates'],
         )

         # Attributes

@@ -791,7 +793,7 @@ class Collection(SortMedias):
         """Get collection config"""
         config = Config(self.root.joinpath('.ordigi', 'ordigi.conf'))

-        return config.get_config_options()
+        return config.get_config_options(), config.get_default_options()

     def _set_option(self, section, option, cli_option):
         """if client option is set overwrite collection option value"""

@@ -817,7 +819,7 @@ class Collection(SortMedias):
     def init(self, loc):
         """Init collection db"""
         for file_path in self.get_collection_files():
-            metadata = self.medias.get_metadata(file_path, self.root, loc)
+            metadata = self.medias.get_metadata(file_path, self.root, loc=loc)
             metadata['file_path'] = os.path.relpath(file_path, self.root)

            self.db.add_file_data(metadata)

@@ -825,6 +827,46 @@ class Collection(SortMedias):

         return self.summary

+    def check_files(self):
+        """Check file integrity."""
+        for file_path in self.paths.get_files(self.root):
+            checksum = utils.checksum(file_path)
+            relpath = file_path.relative_to(self.root)
+            if checksum == self.db.sqlite.get_checksum(relpath):
+                self.summary.append('check', True, file_path)
+            else:
+                self.log.error(f'{file_path} is corrupted')
+                self.summary.append('check', False, file_path)
+
+        return self.summary
+
+    def file_in_db(self, file_path, db_rows):
+        # Assuming file_path are inside collection root dir
+        relpath = os.path.relpath(file_path, self.root)
+
+        # If file not in database
+        if relpath not in db_rows:
+            return False
+
+        return True
+
+    def _check_file(self, file_path, file_checksum):
+        """Check if file checksum as changed"""
+        relpath = os.path.relpath(file_path, self.root)
+        db_checksum = self.db.sqlite.get_checksum(relpath)
+        # Check if checksum match
+        if not db_checksum:
+            return None
+
+        if db_checksum != file_checksum:
+            self.log.warning(f'{file_path} checksum as changed')
+            self.log.info(
+                f'file_checksum={file_checksum},\ndb_checksum={db_checksum}'
+            )
+            return False
+
+        return True
+
     def check_db(self):
         """
         Check if db FilePath match to collection filesystem
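The new `check_files()` and `_check_file()` methods both lean on `utils.checksum()`, which this diff never shows. For readers following along, a minimal sketch of what such a helper typically looks like (the hash algorithm and chunk size are assumptions, not necessarily ordigi's actual choices):

```python
import hashlib

def checksum(file_path, blocksize=65536):
    """Return the hex digest of a file, read in fixed-size chunks."""
    hasher = hashlib.sha256()
    with open(file_path, 'rb') as handle:
        # Read in chunks so large media files do not load fully into memory
        for chunk in iter(lambda: handle.read(blocksize), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
```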
@@ -833,12 +875,20 @@ class Collection(SortMedias):
         file_paths = list(self.get_collection_files())
         db_rows = [row['FilePath'] for row in self.db.sqlite.get_rows('metadata')]
         for file_path in file_paths:
-            relpath = os.path.relpath(file_path, self.root)
-            # If file not in database
-            if relpath not in db_rows:
+            result = self.file_in_db(file_path, db_rows)
+            checksum = utils.checksum(file_path)
+            if not result:
                 self.log.error('Db data is not accurate')
                 self.log.info(f'{file_path} not in db')
                 return False
+            elif not self._check_file(file_path, checksum):
+                # We d'ont want to silently ignore or correct this without
+                # resetting the cache as is could be due to file corruption
+                self.log.error(f'modified or corrupted file.')
+                self.log.info(
+                    'Use ordigi update --checksum or --reset-cache, check database integrity or try to restore the file'
+                )
+                return False

         nb_files = len(file_paths)
         nb_row = len(db_rows)

@@ -882,7 +932,7 @@ class Collection(SortMedias):

         return self.summary

-    def update(self, loc):
+    def update(self, loc, update_checksum=False):
         """Update collection db"""
         file_paths = list(self.get_collection_files())
         db_rows = list(self.db.sqlite.get_rows('metadata'))

@@ -897,9 +947,22 @@ class Collection(SortMedias):

         for file_path in file_paths:
             relpath = os.path.relpath(file_path, self.root)
+            metadata = {}
+
+            checksum = utils.checksum(file_path)
+            if not self._check_file(file_path, checksum) and update_checksum:
+                # metatata will fill checksum from file
+                metadata = self.medias.get_metadata(
+                    file_path, self.root, checksum, loc=loc
+                )
+                metadata['file_path'] = relpath
+                # set row attribute to the file
+                self.db.add_file_data(metadata)
+                self.summary.append('update', file_path)
+
             # If file not in database
             if relpath not in db_paths:
-                metadata = self.medias.get_metadata(file_path, self.root, loc)
+                metadata = self.medias.get_metadata(file_path, self.root, loc=loc)
                 metadata['file_path'] = relpath
                 # Check if file checksum is in invalid rows
                 row = []

@@ -923,19 +986,6 @@ class Collection(SortMedias):

         return self.summary

-    def check_files(self):
-        """Check file integrity."""
-        for file_path in self.paths.get_files(self.root):
-            checksum = utils.checksum(file_path)
-            relpath = file_path.relative_to(self.root)
-            if checksum == self.db.sqlite.get_checksum(relpath):
-                self.summary.append('check', True, file_path)
-            else:
-                self.log.error(f'{file_path} is corrupted')
-                self.summary.append('check', False, file_path)
-
-        return self.summary
-
     def set_utime_from_metadata(self, date_media, file_path):
         """Set the modification time on the file based on the file name."""

@@ -1002,17 +1052,15 @@ class Collection(SortMedias):

         return self.summary

-    def sort_files(
-        self, src_dirs, path_format, loc, imp=False, remove_duplicates=False
-    ):
+    def sort_files(self, src_dirs, loc, imp=False):
         """
         Sort files into appropriate folder
         """
         # Check db
         self._init_check_db(loc)

-        # if path format client option is set overwrite it
-        self._set_option('Path', 'path_format', path_format)
+        path_format = self.opt['Path']['path_format']
+        self.log.debug(f'path_format: {path_format}')

         # Get medias data
         subdirs = set()

@@ -1026,7 +1074,7 @@ class Collection(SortMedias):
             self.medias.datas[src_path] = copy(metadata)

         # Sort files and solve conflicts
-        self.summary = self.sort_medias(imp, remove_duplicates)
+        self.summary = self.sort_medias(imp)

         if imp != 'copy':
             self.remove_empty_subdirs(subdirs, src_dirs)

@@ -1036,7 +1084,7 @@ class Collection(SortMedias):

         return self.summary

-    def dedup_path(self, paths, dedup_regex=None, remove_duplicates=False):
+    def dedup_path(self, paths, dedup_regex=None):
         """Deduplicate file path parts"""

         # Check db

@@ -1077,7 +1125,7 @@ class Collection(SortMedias):
             self.medias.datas[src_path] = copy(metadata)

         # Sort files and solve conflicts
-        self.sort_medias(remove_duplicates=remove_duplicates)
+        self.sort_medias()

         if not self.check_db():
             self.summary.append('check', False)

@@ -1109,7 +1157,7 @@ class Collection(SortMedias):

         return True

-    def sort_similar_images(self, path, similarity=80, remove_duplicates=False):
+    def sort_similar_images(self, path, similarity=80):
         """Sort similar images using imagehash library"""
         # Check db
         self._init_check_db()

@@ -1128,7 +1176,7 @@ class Collection(SortMedias):
             )
             if similar_images:
                 # Move the simlars file into the destination directory
-                self.sort_medias(remove_duplicates=remove_duplicates)
+                self.sort_medias()

         nb_row_end = self.db.sqlite.len('metadata')
         if nb_row_ini and nb_row_ini != nb_row_end:

@@ -1179,8 +1227,6 @@ class Collection(SortMedias):
                 media.metadata['longitude'] = coordinates['longitude']
                 media.set_location_from_coordinates(loc)

-            # Update database
-            self.db.add_file_data(media.metadata)
             # Update exif data
             if key in (
                 'date_original',

@@ -1206,6 +1252,13 @@ class Collection(SortMedias):
                 )
             else:
                 result = exif.set_key_values(key, value)

+            # Update checksum
+            media.metadata['checksum'] = utils.checksum(file_path)
+
+            # Update database
+            self.db.add_file_data(media.metadata)
+
             if result:
                 self.summary.append('update', True, file_path)
             else:
@@ -53,9 +53,9 @@ class Config:
         else:
             self.conf = conf

-        self.options = self.set_default_options()
+        self.options = self.get_default_options()

-    def set_default_options(self) -> dict:
+    def get_default_options(self) -> dict:
         # Initialize with default options
         return {
             'Exif': {

@@ -71,6 +71,7 @@ class Config:
                 'extensions': None,
                 'glob': '**/*',
                 'max_deep': None,
+                'remove_duplicates': False,
             },
             'Geolocation': {
                 'geocoder': constants.DEFAULT_GEOCODER,

@@ -137,6 +138,7 @@ class Config:
             'dry_run',
             'interactive',
             'prefer_english_names',
+            'remove_duplicates',
             'use_date_filename',
             'use_file_dates',
         }
@@ -310,7 +310,9 @@ class Sqlite:
         value = None
         self.cur.execute('SELECT * FROM location')
         for row in self.cur:
-            distance = distance_between_two_points(latitude, longitude, row[0], row[1])
+            distance = distance_between_two_points(
+                latitude, longitude, row['Latitude'], row['Longitude']
+            )
             # Use if closer then threshold_km reuse lookup
             if distance < shorter_distance and distance <= threshold_m:
                 shorter_distance = distance
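The switch from `row[0]`/`row[1]` to `row['Latitude']`/`row['Longitude']` only works when the connection hands back name-addressable rows; in the standard library that means setting `sqlite3.Row` as the row factory. A sketch of the mechanism, assuming plain `sqlite3` underneath ordigi's wrapper:

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.row_factory = sqlite3.Row  # rows become addressable by index and by column name
cur = conn.cursor()
cur.execute('CREATE TABLE location (Latitude REAL, Longitude REAL)')
cur.execute('INSERT INTO location VALUES (45.58339, 4.79823)')
cur.execute('SELECT * FROM location')
for row in cur:
    print(row['Latitude'], row['Longitude'])  # 45.58339 4.79823
```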
@@ -62,7 +62,7 @@ class ExifMetadata:
         ]
         tags_keys['latitude_ref'] = ['EXIF:GPSLatitudeRef']
         tags_keys['longitude_ref'] = ['EXIF:GPSLongitudeRef']
-        tags_keys['original_name'] = ['XMP:OriginalFileName']
+        tags_keys['original_name'] = ['EXIF:OriginalFileName', 'XMP:OriginalFileName']

         # Remove ignored tag from list
         for tag_regex in self.ignore_tags:

@@ -279,6 +279,7 @@ class Media(ReadExif):
         ignore_tags=None,
         interactive=False,
         cache=True,
+        checksum=None,
         use_date_filename=False,
         use_file_dates=False,
     ):

@@ -292,6 +293,11 @@ class Media(ReadExif):

         self.album_from_folder = album_from_folder
         self.cache = cache
+        if checksum:
+            self.checksum = checksum
+        else:
+            self.checksum = utils.checksum(file_path)
+
         self.interactive = interactive
         self.log = LOG.getChild(self.__class__.__name__)
         self.metadata = None

@@ -359,9 +365,9 @@ class Media(ReadExif):
         stem = os.path.splitext(filename)[0]
         date_original = self.metadata['date_original']
         if self.metadata['original_name']:
-            date_filename = utils.get_date_from_string(self.metadata['original_name'])
+            date_filename, _, _ = utils.get_date_from_string(self.metadata['original_name'])
         else:
-            date_filename = utils.get_date_from_string(stem)
+            date_filename, _, _ = utils.get_date_from_string(stem)
         self.log.debug(f'date_filename: {date_filename}')

         date_original = self.metadata['date_original']

@@ -370,6 +376,8 @@ class Media(ReadExif):
         file_modify_date = self.metadata['file_modify_date']
         if self.metadata['date_original']:
             if date_filename and date_filename != date_original:
+                timedelta = abs(date_original - date_filename)
+                if timedelta.total_seconds() > 60:
                     self.log.warning(
                         f"{filename} time mark is different from {date_original}"
                     )

@@ -385,13 +393,15 @@ class Media(ReadExif):

             return self.metadata['date_original']

-        self.log.warning(f"could not find original date for {self.file_path}")
+        self.log.warning(f"could not find date original for {self.file_path}")

         if self.use_date_filename and date_filename:
             self.log.info(
                 f"use date from filename:{date_filename} for {self.file_path}"
             )
             if date_created and date_filename > date_created:
+                timedelta = abs(date_created - date_filename)
+                if timedelta.total_seconds() > 60:
                     self.log.warning(
                         f"{filename} time mark is more recent than {date_created}"
                     )

@@ -523,31 +533,6 @@ class Media(ReadExif):

         return db.get_metadata(relpath, 'LocationId')

-    def _check_file(self, db, root):
-        """Check if file_path is a subpath of root"""
-
-        if str(self.file_path).startswith(str(root)):
-            relpath = os.path.relpath(self.file_path, root)
-            db_checksum = db.get_checksum(relpath)
-            file_checksum = self.metadata['checksum']
-            # Check if checksum match
-            if db_checksum and db_checksum != file_checksum:
-                self.log.error(f'{self.file_path} checksum has changed')
-                self.log.error('(modified or corrupted file).')
-                self.log.error(
-                    f'file_checksum={file_checksum},\ndb_checksum={db_checksum}'
-                )
-                self.log.info(
-                    'Use --reset-cache, check database integrity or try to restore the file'
-                )
-                # We d'ont want to silently ignore or correct this without
-                # resetting the cache as is could be due to file corruption
-                sys.exit(1)
-
-            return relpath, db_checksum
-
-        return None, None
-
     def set_location_from_db(self, location_id, db):

         self.metadata['location_id'] = location_id

@@ -607,17 +592,17 @@ class Media(ReadExif):
         All keys will be present and have a value of None if not obtained.
         """
         self.metadata = {}
-        self.metadata['checksum'] = utils.checksum(self.file_path)
+        self.metadata['checksum'] = self.checksum

         db_checksum = False
         location_id = None
-        if cache and db:
-            relpath, db_checksum = self._check_file(db, root)
+        if cache and db and str(self.file_path).startswith(str(root)):
+            relpath = os.path.relpath(self.file_path, root)
+            db_checksum = db.get_checksum(relpath)
         if db_checksum:
             location_id = self._set_metadata_from_db(db, relpath)
             self.set_location_from_db(location_id, db)
         else:
-            # file not in db
             self.metadata['src_dir'] = str(self.src_dir)
             self.metadata['subdirs'] = str(
                 self.file_path.relative_to(self.src_dir).parent
@@ -684,7 +669,7 @@ class Medias:
         self.datas = {}
         self.theme = request.load_theme()

-    def get_media(self, file_path, src_dir):
+    def get_media(self, file_path, src_dir, checksum=None):
         media = Media(
             file_path,
             src_dir,

@@ -692,28 +677,34 @@ class Medias:
             self.exif_opt['ignore_tags'],
             self.interactive,
             self.exif_opt['cache'],
+            checksum,
             self.exif_opt['use_date_filename'],
             self.exif_opt['use_file_dates'],
         )

         return media

-    def get_media_data(self, file_path, src_dir, loc=None):
-        media = self.get_media(file_path, src_dir)
+    def get_media_data(self, file_path, src_dir, checksum=None, loc=None):
+        media = self.get_media(file_path, src_dir, checksum)
         media.get_metadata(
             self.root, loc, self.db.sqlite, self.exif_opt['cache']
         )

         return media

-    def get_metadata(self, src_path, src_dir, loc=None):
+    def get_metadata(self, src_path, src_dir, checksum=None, loc=None):
         """Get metadata"""
-        return self.get_media_data(src_path, src_dir, loc).metadata
+        return self.get_media_data(src_path, src_dir, checksum, loc).metadata

     def get_paths(self, src_dirs, imp=False):
         """Get paths"""
         for src_dir in src_dirs:
             src_dir = self.paths.check(src_dir)

+            if src_dir.is_file():
+                yield src_dir.parent, src_dir
+                continue
+
             paths = self.paths.get_paths_list(src_dir)

         # Get medias and src_dirs

@@ -730,7 +721,7 @@ class Medias:
         """Get medias datas"""
         for src_dir, src_path in self.get_paths(src_dirs, imp=imp):
             # Get file metadata
-            media = self.get_media_data(src_path, src_dir, loc)
+            media = self.get_media_data(src_path, src_dir, loc=loc)

             yield src_path, media

@@ -738,11 +729,11 @@ class Medias:
         """Get medias data"""
         for src_dir, src_path in self.get_paths(src_dirs, imp=imp):
             # Get file metadata
-            metadata = self.get_metadata(src_path, src_dir, loc)
+            metadata = self.get_metadata(src_path, src_dir, loc=loc)

             yield src_path, metadata

-    def update_exif_data(self, metadata):
+    def update_exif_data(self, metadata, imp=False):

         file_path = self.root / metadata['file_path']
         exif = WriteExif(

@@ -752,8 +743,8 @@ class Medias:
         )

         updated = False
-        if metadata['original_name'] in (None, ''):
-            exif.set_value('original_name', metadata['filename'])
+        if imp and metadata['original_name'] in (None, ''):
+            exif.set_key_values('original_name', metadata['filename'])
             updated = True
         if self.exif_opt['album_from_folder']:
             exif.set_album_from_folder()
@@ -69,17 +69,17 @@ def get_date_regex(user_regex=None):
         # regex to match date format type %Y%m%d, %y%m%d, %d%m%Y,
         # etc...
         'a': re.compile(
-            r'.*[_-]?(?P<year>\d{4})[_-]?(?P<month>\d{2})[_-]?(?P<day>\d{2})[_-]?(?P<hour>\d{2})[_-]?(?P<minute>\d{2})[_-]?(?P<second>\d{2})'
+            r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_.]?(?P<hour>\d{2})[-_.]?(?P<minute>\d{2})[-_.]?(?P<second>\d{2})([-_./ ])'
         ),
         'b': re.compile(
-            r'[-_./](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_./]'
+            r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
         ),
         # not very accurate
         'c': re.compile(
-            r'[-_./](?P<year>\d{2})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})[-_./]'
+            r'[-_./ ](?P<year>\d{2})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
         ),
         'd': re.compile(
-            r'[-_./](?P<day>\d{2})[-_.](?P<month>\d{2})[-_.](?P<year>\d{4})[-_./]'
+            r'[-_./ ](?P<day>\d{2})[-_.](?P<month>\d{2})[-_.](?P<year>\d{4})([-_./ ])'
         ),
     }
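The reworked patterns now require a separator on both sides of the date and capture the trailing one, which `get_date_from_string()` (next hunk) returns to callers. A quick check of pattern 'b' against a typical path:

```python
import re

regex_b = re.compile(
    r'[-_./ ](?P<year>\d{4})[-_.]?(?P<month>\d{2})[-_.]?(?P<day>\d{2})([-_./ ])'
)
print(re.findall(regex_b, '/photos/2021-02-24_clip.mp4'))
# [('2021', '02', '24', '_')]  <- the last element is the captured separator
```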
@@ -96,15 +96,18 @@ def get_date_from_string(string):
     # Otherwise assume a filename such as IMG_20160915_123456.jpg as default.

     matches = []
+    sep = ''
     for i, regex in DATE_REGEX.items():
         match = re.findall(regex, string)
         if match != []:
+            sep = match[0][3]
             if i == 'c':
                 match = [('20' + match[0][0], match[0][1], match[0][2])]
             elif i == 'd':
                 # reorder items
                 match = [(match[0][2], match[0][1], match[0][0])]
-            # matches = match + matches
+            else:
+                match = [(match[0][0], match[0][1], match[0][2])]
             if len(match) != 1:
                 # The time string is not uniq
                 continue
@@ -119,9 +122,11 @@ def get_date_from_string(string):
             date_object = tuple(map(int, matches[0][0]))
             date = datetime(*date_object)
         except (KeyError, ValueError):
-            return None
+            return None, matches[0][1], sep

-        return date
+        return date, matches[0][1], sep

+    return None, None, sep


 def match_date_regex(regex, value):
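Downstream callers change accordingly: the function now hands back a 3-tuple of the parsed date, what appears to be the key of the matching regex, and the captured separator, as the TestMedia hunk further down shows. A hedged sketch of the new calling convention (the import path is assumed; the diff only shows the function body):

```python
# Assumed import location for illustration.
from ordigi.utils import get_date_from_string

date_filename, regex_key, sep = get_date_from_string('my-album_2016-09-15.jpg')
if date_filename:
    # e.g. datetime(2016, 9, 15), matched by pattern 'b', separator '.'
    print(date_filename, regex_key, sep)
```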
@@ -154,6 +154,7 @@ class TestOrdigi:
         shutil.copyfile(file_path, dest_path)
         for opt, arg in self.logger_options:
             self.assert_cli(cli._update, [opt, arg, str(self.src_path)])
+        self.assert_cli(cli._update, ['--checksum', str(self.src_path)])

     def assert_check(self):
         for opt, arg in self.logger_options:
@@ -187,7 +188,6 @@ class TestOrdigi:
     def test_init_update_check_clean(self):
         self.assert_init()
         self.assert_update()
-        self.assert_check()
         self.assert_clean()

     def test_import(self, tmp_path):
@@ -206,7 +206,6 @@ class TestOrdigi:
             ('--exclude', '.DS_Store'),
             *self.filter_options,
             ('--path-format', '{%Y}/{folder}/{stem}.{ext}'),
-
         )

         paths = (str(self.src_path), str(tmp_path))
@@ -240,6 +239,9 @@ class TestOrdigi:
         self.assert_cli(cli._compare, paths)
         self.assert_options(cli._compare, bool_options, arg_options, paths)

+    def test_check(self):
+        self.assert_check()
+

 def test_needsfiles(tmpdir):
     assert tmpdir
@@ -137,11 +137,12 @@ class TestCollection:
         assert summary.success_table.sum('sort') == nb

     def test_sort_files(self, tmp_path):
-        cli_options = {'album_from_folder': True, 'cache': False}
+        cli_options = {
+            'album_from_folder': True, 'cache': False, 'path_format': self.path_format
+        }
         collection = Collection(tmp_path, cli_options=cli_options)
         loc = GeoLocation()
-        summary = collection.sort_files([self.src_path],
-            self.path_format, loc, imp='copy')
+        summary = collection.sort_files([self.src_path], loc, imp='copy')

         self.assert_import(summary, 29)
@@ -166,16 +167,16 @@ class TestCollection:
         collection = Collection(tmp_path, cli_options=cli_options)
         # Try to change path format and sort files again
         path_format = 'test_exif/<city>/<%Y>-<name>.%l<ext>'
-        summary = collection.sort_files([tmp_path], path_format, loc)
+        summary = collection.sort_files([tmp_path], loc)

-        self.assert_sort(summary, 27)
+        self.assert_sort(summary, 26)

         shutil.copytree(tmp_path / 'test_exif', tmp_path / 'test_exif_copy')
         collection.summary = Summary(tmp_path)
         assert collection.summary.success_table.sum() == 0
         summary = collection.update(loc)
-        assert summary.success_table.sum('update') == 29
-        assert summary.success_table.sum() == 29
+        assert summary.success_table.sum('update') == 2
+        assert summary.success_table.sum() == 2
         assert not summary.errors
         collection.summary = Summary(tmp_path)
         summary = collection.update(loc)
@@ -195,12 +196,11 @@ class TestCollection:
         assert not summary.errors

     def test_sort_files_invalid_db(self, tmp_path):
-        collection = Collection(tmp_path)
+        collection = Collection(tmp_path, {'path_format': self.path_format})
         loc = GeoLocation()
         randomize_db(tmp_path)
         with pytest.raises(sqlite3.DatabaseError) as e:
-            summary = collection.sort_files([self.src_path],
-                self.path_format, loc, imp='copy')
+            summary = collection.sort_files([self.src_path], loc, imp='copy')

     def test_sort_file(self, tmp_path):
         for imp in ('copy', 'move', False):
@@ -218,6 +218,7 @@ class TestCollection:
             )
             assert not summary.errors
             # Ensure files remain the same
+            if not imp:
                 assert collection._checkcomp(dest_path, src_checksum)

             if imp == 'copy':
@@ -28,7 +28,7 @@ class TestSqlite:
             'CameraMake': 'camera_make',
             'CameraModel': 'camera_model',
             'OriginalName':'original_name',
-            'SrcPath': 'src_path',
+            'SrcDir': 'src_dir',
             'Subdirs': 'subdirs',
             'Filename': 'filename'
         }
@@ -44,8 +44,8 @@ class TestSqlite:
             'Location': 'location'
         }

-        cls.sqlite.add_row('metadata', row_data)
-        cls.sqlite.add_row('location', location_data)
+        cls.sqlite.upsert_metadata(row_data)
+        cls.sqlite.upsert_location(location_data)
         # cls.sqlite.add_metadata_data('filename', 'ksinslsdosic', 'original_name', 'date_original', 'album', 1)
         # cls.sqlite.add_location(24.2, 7.3, 'city', 'state', 'country', 'location')
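The new upsert_metadata/upsert_location helpers replace the generic add_row calls above. Their bodies are not part of this diff; the usual SQLite idiom behind such helpers is INSERT ... ON CONFLICT DO UPDATE, roughly as in this hypothetical sketch (table and column names invented for illustration):

```python
import sqlite3

con = sqlite3.connect(':memory:')
con.execute(
    'CREATE TABLE location ('
    'Latitude REAL, Longitude REAL, City TEXT, '
    'UNIQUE(Latitude, Longitude))'
)

def upsert_location(con, row):
    # Insert the row; if the (Latitude, Longitude) key already exists,
    # update the remaining columns instead of adding a duplicate row.
    con.execute(
        'INSERT INTO location (Latitude, Longitude, City) '
        'VALUES (:Latitude, :Longitude, :City) '
        'ON CONFLICT(Latitude, Longitude) DO UPDATE SET City=excluded.City',
        row,
    )

upsert_location(con, {'Latitude': 24.2, 'Longitude': 7.3, 'City': 'city'})
```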
@@ -66,6 +66,7 @@ class TestSqlite:
         result = tuple(self.sqlite.cur.execute("""select * from metadata where
                 rowid=1""").fetchone())
         assert result == (
+            1,
             'file_path',
             'checksum',
             'album',
@@ -79,7 +80,7 @@ class TestSqlite:
             'camera_make',
             'camera_model',
             'original_name',
-            'src_path',
+            'src_dir',
             'subdirs',
             'filename'
         )
@@ -96,7 +97,9 @@ class TestSqlite:
         result = tuple(self.sqlite.cur.execute("""select * from location where
                 rowid=1""").fetchone())
         assert result == (
-            24.2, 7.3,
+            1,
+            24.2,
+            7.3,
             'latitude_ref',
             'longitude_ref',
             'city',
@@ -90,10 +90,10 @@ class TestMedia:
         date_filename = None
         for tag in media.tags_keys['original_name']:
             if tag in exif_data:
-                date_filename = get_date_from_string(exif_data[tag])
+                date_filename, _, _ = get_date_from_string(exif_data[tag])
                 break
         if not date_filename:
-            date_filename = get_date_from_string(file_path.name)
+            date_filename, _, _ = get_date_from_string(file_path.name)

         if media.metadata['date_original']:
             assert date_media == media.metadata['date_original']
@@ -0,0 +1,111 @@
+# NOW
+
+- should db integrity be checked in collection rather than in media??
+
+- build structure to store file path and info with metadata:
+  metadatas[file_path] = {'checksum': value}. Init must select the same files
+  as get_metadata
+
+- check edit_metadata again, test with valid doc
+- show exif metadata
+- print all values and select some to edit
+- dry run = no changes
+
+- compare: custom output folder, similar to?
+- ordigi-gui
+- add name and dirpath options???
+
+# TODO
+Options:
+--location --time
+# -f overwrite metadata
+--auto|-a: a set of options: geolocalisation, best match date, rename, album
+from folder...
+# --keep-folder option
+# --rename
+--confirm unsure operations
+
+# Bugs
+- summary
+
+- set date original???, interactive mode...
+
+- make sure the program does not crash...
+- option to not update exif metadata...
+## Exiftools
+https://gitlab.com/TNThieding/exif
+exiftool -akljklbum=tjkljkestjlj /tmp/pytest-of-cedric/pytest-12/test_sort_files0/2008-10-Oct/test_exif/2008-10-24_09-12-56-photo.nef
+
+exiftool -album=tjkljkestjlj /tmp/pytest-of-cedric/pytest-12/test_sort_files0/2008-10-Oct/test_exif/2008-10-24_09-12-56-photo.nef
+1 image files updated
+
+Get result code....
+
+
+## Doc: use sphinx??
+
+## Commands
+- ordigi view/show
+- ordigi search
+- use tree to show paths?
+
+
+# Pylint
+https://pythonspeed.com/articles/pylint/
+use config file
+
+# Media:
+
+# Test:
+
+# enhancement
+- summary: replace success by copied/moved/deleted
+
+## Alias
+alias ogi=ordigi
+
+## Image analysis
+https://pypi.org/project/google-cloud-vision/
+https://googleapis.dev/python/vision/latest/index.html
+https://www.datacamp.com/community/tutorials/beginner-guide-google-vision-api
+
+
+## Album from folder
+
+# Update
+
+https://github.com/JohannesBuchner/imagehash
+https://github.com/cw-somil/Duplicate-Remover
+https://leons.im/posts/a-python-implementation-of-simhash-algorithm/
+
+Visually check similar images
+https://www.pluralsight.com/guides/importing-image-data-into-numpy-arrays
+https://stackoverflow.com/questions/56056054/add-check-boxes-to-scrollable-image-in-python
+https://wellsr.com/python/python-image-manipulation-with-pillow-library/
+kitty grid image?
+https://fr.wikibooks.org/wiki/PyQt/PyQt_versus_wxPython
+https://docs.python.org/3/faq/gui.html
+https://docs.opencv.org/3.4/d3/df2/tutorial_py_basic_ops.html
+https://stackoverflow.com/questions/52727332/python-tkinter-create-checkbox-list-from-listbox
+
+
+Image grid methods:
+matplotlib
+https://gist.github.com/lebedov/7018889ba47668c64bcf96aee82caec0
+
+Tkinter
+https://python-forum.io/thread-22700.html
+https://stackoverflow.com/questions/43326282/how-can-i-use-images-in-a-tkinter-grid
+
+wxwidget
+https://wxpython.org/Phoenix/docs/html/wx.lib.agw.thumbnailctrl.html
+
+
+Ability to change metadata on a selection
+
+Fix: change version number to 0.x99
+
+https://github.com/andrewning/sortphotos/blob/master/src/sortphotos.py
+
+# AFTER
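On the "Get result code" note above: exiftool signals failure through its exit status, so a thin wrapper around subprocess is one way to pick it up (a sketch, not the project's code; the tag value and file name are taken from the example above):

```python
import subprocess

def set_album(file_path, album):
    """Run exiftool and report whether the tag write succeeded."""
    proc = subprocess.run(
        ['exiftool', f'-album={album}', str(file_path)],
        capture_output=True, text=True,
    )
    # exiftool exits with 0 on success and prints e.g. '1 image files updated'
    return proc.returncode == 0, proc.stdout.strip()

ok, message = set_album('2008-10-24_09-12-56-photo.nef', 'tjkljkestjlj')
print(ok, message)
```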
@@ -0,0 +1,30 @@
+# Create virtual environment
+mkvirtualenv ordigi
+
+# Work on it (activate and cd)
+workon ordigi
+
+# Install required dependencies
+pip install -r requirements.txt
+
+# Link it to the path
+pip install -e .
+
+# View file tree of path
+tree /dest/path
+
+# Test code
+pylint ordigi/* -E
+pylint ordigi/**
+pytest --cov=ordigi --cov-report html tests/*.py
+pip install --prefix=~/.local -e .
+
+# config
+
+## Path format
+dirs_path=<%Y>/<%m-%b>-<city>-<folder>
+name=<%Y%m%d-%H%M%S>-%u<original_name>|%u<basename>.%l<ext>
+
+## run
+ordigi import 220719.bkp -f -c -R collection
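For illustration, under those two patterns a photo shot 2021-06-24 10:20:30 in Paris, imported from a folder named holidays and originally called img_1234.jpg, would land at something like 2021/06-Jun-Paris-holidays/20210624-102030-IMG_1234.jpg — hypothetical values, and assuming %u and %l upper- and lower-case the fields they prefix.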