Compare commits
7 Commits
d711766571...master
344455e409
654f580fc5
107e1241d7
110f18a62f
ddd2daf8cf
1b8674a6c0
ca69ec961e
init.sql (+77 lines)
@@ -69,3 +69,80 @@ select
     join training t on (t.id = td_start.training_id)
     order by td_start.t asc
 );
+
+---------------------------------------------
+-- find workouts using georeferencing      --
+---------------------------------------------
+-- CONSTANTS
+-- via Mirabello (inside the park): st_point(45.607115, 9.283687)
+-- Tamoil roundabout (toward the northern route): st_point(45.622779, 9.276274)
+
+-- classic Parco di Monza loop
+create or replace view parco_monza_classico as
+(
+    select duration,
+           distance,
+           pace_kmh,
+           start_time,
+           end_time,
+           start_location,
+           end_location,
+           training_id
+    from training_info ti
+    where ti.training_id in (
+        -- find the ids of workouts that definitely pass through the Parco di Monza
+        select td.training_id
+        from training_data td
+        join training_data td2 on (td.training_id = td2.training_id)
+        where
+            distance < 31 and
+            st_dwithin(td.geog, st_point(45.607115, 9.283687), 20) -- if the track passes within 20 metres of via Mirabello inside the park
+            and not st_dwithin(td2.geog, st_point(45.622779, 9.276274), 20) -- and if it does not pass by the Tamoil
+    )
+);
+
+-- Arcore loop
+create or replace view arcore as
+(
+    select duration,
+           distance,
+           pace_kmh,
+           start_time,
+           end_time,
+           start_location,
+           end_location,
+           training_id
+    from training_info ti
+    where ti.training_id in (
+        select td.training_id
+        from training_data td join training_data td2 on (td.training_id = td2.training_id)
+        where
+            distance < 42 and
+            end_location = 'Bicocca' and
+            st_dwithin(td.geog, st_point(45.622779, 9.276274), 20) -- if the track passes within 20 metres of the Tamoil
+            and st_dwithin(td2.geog, st_point(45.631299, 9.308985), 30) -- and within 30 metres of the roundabout toward Arcore
+    )
+);
+
+create or replace view monticello as
+(
+    select duration,
+           distance,
+           pace_kmh,
+           start_time,
+           end_time,
+           start_location,
+           end_location,
+           training_id
+    from training_info ti
+    where ti.training_id in (
+        select td.training_id
+        from training_data td join training_data td2 on (td.training_id = td2.training_id) join training_data td3 on (td.training_id = td3.training_id)
+        where
+            distance < 61 and
+            end_location = 'Bicocca' and
+            st_dwithin(td.geog, st_point(45.622779, 9.276274), 20) -- if the track passes by the Tamoil
+            and st_dwithin(td2.geog, st_point(45.631299, 9.308985), 30) -- and within 30 metres of the roundabout toward Arcore
+            and st_dwithin(td3.geog, st_point(45.705275, 9.305889), 20) -- and within 20 metres of the Monticello drinking fountain
+    )
+);
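The new views filter `training_info` down to workouts whose GPS track satisfies a set of `st_dwithin` proximity checks against known landmarks. A minimal usage sketch from Python, assuming a reachable database; the connection string is a placeholder, not part of this change:

```python
# Usage sketch for the new parco_monza_classico view; the DSN is a placeholder.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://user:password@localhost/fitotrack")  # hypothetical credentials
with engine.connect() as conn:
    rows = conn.execute(text(
        "select start_time, distance, pace_kmh from parco_monza_classico order by start_time"
    ))
    for row in rows:
        print(row.start_time, row.distance, row.pace_kmh)
```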
main.py (126 lines changed)
@@ -7,31 +7,31 @@ from glob import glob
 from email.message import Message
 
 from sqlalchemy import create_engine, text
 from sqlalchemy.engine import Connection
 import gpxpy
 from bs4 import BeautifulSoup
 
 
 config = configparser.ConfigParser()
-config.read('config.ini')
+config.read("config.ini")
 
-db = create_engine(f"postgresql://{config['db']['username']}:{config['db']['password']}@{config['db']['host']}/{config['db']['database']}").connect()
+db = create_engine(
+    f"postgresql://{config['db']['username']}:{config['db']['password']}@{config['db']['host']}/{config['db']['database']}"
+).connect()
 
-mail = IMAP4(host=config['mail']['host'])
-fitotrack_msg_filter = 'ALL'
+mail = IMAP4(host=config["mail"]["host"])
+fitotrack_msg_filter = "ALL"
 
 
 def init_database():
-    with open('init.sql') as f:
-        db.execute('\n'.join(f.readlines()))
+    with open("init.sql") as f:
+        db.execute("\n".join(f.readlines()))
 
 
 def _get_sender(msg: Message) -> str:
-    sender: str = msg.get('from')
-    if ' ' in sender:
-        sender = sender.split(' ')
+    sender: str = msg.get("from")
+    if " " in sender:
+        sender = sender.split(" ")
     for field in sender:
-        if '@' in field and '<' in field and '>' in field:
+        if "@" in field and "<" in field and ">" in field:
             return field[1:-1]
 
     return sender
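The database and IMAP settings above come from a config.ini that is not part of this compare. A minimal sketch of the layout implied by the keys read in this hunk; every value below is a placeholder:

```python
# Sketch of the expected config.ini sections/keys; only the key names are taken
# from the reads above, all values are made up.
import configparser

sample = """
[db]
username = fitotrack
password = secret
host = localhost
database = fitotrack

[mail]
host = imap.example.com
username = workouts@example.com
password = secret
"""

config = configparser.ConfigParser()
config.read_string(sample)
assert config["db"]["host"] == "localhost"
assert config["mail"]["host"] == "imap.example.com"
```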
@@ -39,87 +39,113 @@ def _get_sender(msg: Message) -> str:
 
 def get_gpx_files_from_mail():
     mail.starttls(ssl.create_default_context())
-    mail.login(config['mail']['username'], config['mail']['password'])
+    mail.login(config["mail"]["username"], config["mail"]["password"])
 
     mail.select()
     _, ids = mail.search(None, fitotrack_msg_filter)
     ids = ids[0].split()
     for i in ids:
-        _, fetched = mail.fetch(i, '(RFC822)')
+        _, fetched = mail.fetch(i, "(RFC822)")
         email_message = email.message_from_bytes(fetched[0][1])
         sender = _get_sender(email_message)
         for part in email_message.walk():
-            if part.get_content_maintype() == 'multipart' or part.get_content_disposition() is None:
+            if (
+                part.get_content_maintype() == "multipart"
+                or part.get_content_disposition() is None
+            ):
                 continue
             filename = part.get_filename()
 
             if filename:
-                filename = f'{sender}_{filename}'
-                if not os.path.exists(f'gpx_files/{filename}'):
-                    with open(f'gpx_files/{filename}', 'wb') as f:
-                        print(f'creating {filename}')
+                filename = f"{sender}_{filename}"
+                if not os.path.exists(f"gpx_files/{filename}"):
+                    with open(f"gpx_files/{filename}", "wb") as f:
+                        print(f"creating {filename}")
                         f.write(part.get_payload(decode=True))
 
-        mail.store(i, '+FLAGS', '\\Deleted')
+        mail.store(i, "+FLAGS", "\\Deleted")
 
     mail.expunge()
     mail.close()
     mail.logout()
 
 
-def process_gpx_files(tx: Connection):
-    for filepath in glob('gpx_files/*.gpx'):
-        owner = os.path.split(filepath)[-1].split('_workout-')[0]
+def process_gpx_files():
+    for filepath in glob("gpx_files/*.gpx"):
+        owner = os.path.split(filepath)[-1].split("_workout-")[0]
         filename = f'workout-{os.path.split(filepath)[-1].split("_workout-")[1]}'
-        print(f'Processing {filename}')
-        if list(tx.execute(text('select exists(select from training where owner = :owner and filename = :filename)'),
-                dict(owner=owner, filename=filename,),),)[0][0]:
+        print(f"Processing {filename}")
+        if list(
+            db.execute(
+                text(
+                    "select exists(select from training where owner = :owner and filename = :filename)"
+                ),
+                dict(
+                    owner=owner,
+                    filename=filename,
+                ),
+            ),
+        )[0][0]:
            os.remove(filepath)
            continue
        with open(filepath) as f:
            gpx_file = gpxpy.parse(f)
-        if gpx_file.creator != 'FitoTrack':
-            raise ValueError('gpx file not generated by the FitoTrack app')
-        training_id = list(tx.execute(
-            text("""
+        if gpx_file.creator != "FitoTrack":
+            raise ValueError("gpx file not generated by the FitoTrack app")
+        training_id = list(
+            db.execute(
+                text(
+                    """
             insert into training(owner, filename, type, description, moving_time, stopped_time, moving_distance, stopped_distance) values
             (:owner, :filename, :type, :description, :moving_time, :stopped_time, :moving_distance, :stopped_distance) returning id
-            """),
-            dict(owner=owner,
-                filename=filename,
-                type='cycling',  # TODO other training types
-                description=gpx_file.description,
-                moving_time=gpx_file.get_moving_data().moving_time,
-                stopped_time=gpx_file.get_moving_data().stopped_time,
-                moving_distance=gpx_file.get_moving_data().moving_distance,
-                stopped_distance=gpx_file.get_moving_data().stopped_distance,),
-        ))[0][0]
+                    """
+                ),
+                dict(
+                    owner=owner,
+                    filename=filename,
+                    type="cycling",  # TODO other training types
+                    description=gpx_file.description,
+                    moving_time=gpx_file.get_moving_data().moving_time,
+                    stopped_time=gpx_file.get_moving_data().stopped_time,
+                    moving_distance=gpx_file.get_moving_data().moving_distance,
+                    stopped_distance=gpx_file.get_moving_data().stopped_distance,
+                ),
+            )
+        )[0][0]
        for track in gpx_file.tracks:
            for segment in track.segments:
                for point in segment.points:
-                    tx.execute(text("""
+                    db.execute(
+                        text(
+                            """
                        insert into training_data(training_id, t, geog, speed, elevation)
                        values (:training_id, :t, :geog, :speed, :elevation)
-                    """),
-                    dict(training_id=training_id,
-                        t=point.time,
-                        geog=f'POINT({point.latitude} {point.longitude})',
-                        speed=point.speed,
-                        elevation=point.elevation,),)
+                            """
+                        ),
+                        dict(
+                            training_id=training_id,
+                            t=point.time,
+                            geog=f"POINT({point.latitude} {point.longitude})",
+                            speed=point.speed,
+                            elevation=point.elevation,
+                        ),
+                    )
        os.remove(filepath)
 
 
 def main():
     try:
-        os.mkdir('gpx_files')
+        os.mkdir("gpx_files")
     except FileExistsError:
         pass
     init_database()
     get_gpx_files_from_mail()
-    db.transaction(process_gpx_files)
+
+    with db.begin():
+        process_gpx_files()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     try:
         main()
     except (KeyboardInterrupt, EOFError):
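In main(), the callback-style db.transaction(process_gpx_files) is replaced by an explicit with db.begin(): block, so all rows inserted while processing the GPX files commit together and roll back together if the function raises. A minimal sketch of that pattern, using a throwaway in-memory SQLite engine as a stand-in for the module-level db connection:

```python
# Sketch of the new transaction pattern; the in-memory SQLite engine is only a stand-in.
from sqlalchemy import create_engine, text

conn = create_engine("sqlite://").connect()

with conn.begin():  # commits when the block exits cleanly, rolls back if it raises
    conn.execute(text("create table demo (id integer)"))
    conn.execute(text("insert into demo values (1)"))

print(conn.execute(text("select count(*) from demo")).scalar())  # -> 1
```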
@@ -4,3 +4,4 @@ psycopg2
 bs4
 sqlparse
 tabulate
+black