Another Ham Radio Logbook -- web, multi-user, multi-logbook, with eQSL upload support

crons.py 16KB

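# crons.py -- scheduled maintenance tasks for AHRL: enrich QSOs from HamQTH,
# backfill missing DXCC data, refresh the DXCC tables from Clublog's cty.xml,
# and synchronise the eQSL inbox/outbox.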
from __future__ import print_function
import gzip
import os
import shutil
import urllib.parse
import urllib.error
import urllib.request
import xml.etree.ElementTree as ElementTree
import re
import datetime

from dateutil import parser
from flask import current_app

from libjambon import eqsl_upload_log, get_dxcc_from_clublog_or_database
from models import db, DxccEntities, DxccExceptions, DxccPrefixes, Log, Config, UserLogging, User, Logbook
from utils import add_log
from adif import parse as parse_adif
from pyhamqth import HamQTH, HamQTHQueryFailed
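# Enrich not-yet-consolidated QSOs with data from HamQTH (name, QTH, gridsquare,
# coordinates, web, IOTA) for every user that has HamQTH credentials configured.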
def update_qsos_from_hamqth():
    users = User.query.filter(User.hamqth_name.isnot(None), User.hamqth_password.isnot(None)).all()
    for user in users:
        updated = 0
        logs = Log.query.filter(Log.consolidated_hamqth.is_(False), Log.user_id == user.id).all()
        _v = "AHRL"
        _hq = HamQTH(user=user.hamqth_name, password=user.hamqth_password, user_agent_suffix=_v)
        for log in logs:
            if not log.call:
                continue  # skip QSOs without a callsign instead of querying HamQTH with an empty call
            try:
                _csd = _hq.lookup_callsign_data(log.call)
            except HamQTHQueryFailed as e:
                print("Failed for {0}: {1}".format(log.call, e))
                err = UserLogging()
                err.user_id = log.user.id
                err.log_id = log.id
                err.logbook_id = log.logbook.id
                err.category = "HamQTH"
                err.level = "ERROR"
                err.message = "Query failed or call not found: {0}".format(e)
                db.session.add(err)
                log.consolidated_hamqth = True
                continue
            if "nick" in _csd and not log.name:
                log.name = _csd["nick"]
            if "qth" in _csd and not log.qth:
                log.qth = _csd["qth"]
            if "grid" in _csd and not log.gridsquare:
                log.gridsquare = _csd["grid"]
            # if 'country' in _csd and not log.country:
            #     log.country = _csd['country']
            # Leave country as filled by the Clublog DXCC lookup or the local database
            if "latitude" in _csd and not log.lat:
                log.lat = _csd["latitude"]
            if "longitude" in _csd and not log.lon:
                log.lon = _csd["longitude"]
            if "web" in _csd and not log.web:
                log.web = _csd["web"]
            if "iota" in _csd and not log.iota:
                log.iota = _csd["iota"]
            log.consolidated_hamqth = True
            updated += 1
        db.session.commit()
        print("Updated {0} QSOs for {1}".format(updated, user.name))
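# Backfill DXCC entity, CQ zone, country and continent on QSOs missing any of
# them, using the Clublog lookup or the local DXCC tables.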
def update_qsos_without_countries():
    updated = 0
    logs = Log.query.filter(Log.country.is_(None) | Log.dxcc.is_(None) | Log.cqz.is_(None)).all()
    for log in logs:
        if not log.call:
            continue
        dxcc = get_dxcc_from_clublog_or_database(log.call)
        log.dxcc = dxcc["DXCC"]
        log.cqz = dxcc["CQZ"]
        log.country = dxcc["Name"]
        log.cont = dxcc["Continent"]
        db.session.commit()
        updated += 1
    print("Updated {0} QSOs".format(updated))
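# Populate the gridsquare coordinate cache for QSOs that do not have one yet,
# using Log.country_grid_coords().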
def populate_logs_gridsquare_cache():
    updated = 0
    logs = Log.query.filter(Log.cache_gridsquare.is_(None)).all()
    for log in logs:
        qth = log.country_grid_coords()
        if not qth:
            print("!! country_grid_coords() for log {0} returned None, please check !!".format(log.id))
            continue
        log.cache_gridsquare = qth["qth"]
        updated += 1
    db.session.commit()
    print("-- Updated {0} QSOs".format(updated))
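# Rebuild the DXCC entities/exceptions/prefixes tables from Clublog's cty.xml,
# either downloaded with the configured Clublog API key or read from a local file.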
def update_dxcc_from_cty_xml(_file=None, silent=False):
    if not silent:
        print("--- Updating DXCC tables (prefixes, entities, exceptions) from cty.xml")
    fname = os.path.join(current_app.config["TEMP_DOWNLOAD_FOLDER"], "cty.xml")
    config = Config.query.first()
    if not config:
        if not silent:
            print("!!! Error: config not found")
        add_log(category="CONFIG", level="ERROR", message="Config not found")
        raise Exception("config not found")
    if os.path.isfile(fname):
        os.remove(fname)
        if not silent:
            print("-- Removed old file {0}".format(fname))
    if not _file:
        if not silent:
            print("-- Downloading...")
        if not config.clublog_api_key:
            if not silent:
                print("!! Clublog API Key not defined")
            add_log(category="CRONS", level="ERROR", message="Clublog API Key not defined")
            raise Exception("no clublog_api_key in config")
        url = "https://secure.clublog.org/cty.php?api={0}".format(config.clublog_api_key)
        try:
            with urllib.request.urlopen(url) as response, open(fname, "wb") as out_file:
                with gzip.GzipFile(fileobj=response) as uncompressed:
                    shutil.copyfileobj(uncompressed, out_file)
        except urllib.error.URLError as err:
            if not silent:
                print("!! Error: {0}".format(err))
            raise Exception(f"error: {err}")
        if not silent:
            print("-- File downloaded at {0}".format(fname))
    elif os.path.isfile(_file):
        fname = _file
        if not silent:
            print("-- File at {0}".format(fname))
    else:
        if not silent:
            print("-- what are you trying to do ?")
        raise Exception("unknown error")

    # Now parse the XML file
    tree = None
    try:
        tree = ElementTree.parse(fname)
    except FileNotFoundError as err:
        if not silent:
            print("!! Error: {0}".format(err))
        raise Exception(f"file not found: {err}")
    except ElementTree.ParseError as err:
        if not silent:
            print("!! Error: {0}".format(err))
        raise Exception(f"XML Parsing error: {err}")
    if not tree:
        raise Exception("XML tree is none")

    root = tree.getroot()
    for element in root:
        if element.tag == "{http://www.clublog.org/cty/v1.0}entities":
            if not silent:
                print("++ Parsing {0}".format(element.tag))
            rmed = DxccEntities.query.delete()
            if not silent:
                print("-- Cleaned {0} old entries".format(rmed))
            parse_element(element, silent)
        elif element.tag == "{http://www.clublog.org/cty/v1.0}exceptions":
            if not silent:
                print("++ Parsing {0}".format(element.tag))
            rmed = DxccExceptions.query.delete()
            if not silent:
                print("-- Cleaned {0} old entries".format(rmed))
            parse_element(element, silent)
        elif element.tag == "{http://www.clublog.org/cty/v1.0}prefixes":
            if not silent:
                print("++ Parsing {0}".format(element.tag))
            rmed = DxccPrefixes.query.delete()
            if not silent:
                print("-- Cleaned {0} old entries".format(rmed))
            parse_element(element, silent)
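# Parse one <entities>, <exceptions> or <prefixes> element from cty.xml and
# insert its children as DxccEntities/DxccExceptions/DxccPrefixes rows.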
def parse_element(element, silent=False):
    elements = 0
    for child in element:
        skip = False
        if element.tag == "{http://www.clublog.org/cty/v1.0}entities":
            _obj = DxccEntities()
            _obj.ituz = 999  # cty.xml does not carry the ITU zone, so default it to 999
        elif element.tag == "{http://www.clublog.org/cty/v1.0}exceptions":
            _obj = DxccExceptions()
        elif element.tag == "{http://www.clublog.org/cty/v1.0}prefixes":
            _obj = DxccPrefixes()
        else:
            return
        if "record" in child.attrib:
            _obj.record = child.attrib["record"]
        for attr in child:
            if attr.tag == "{http://www.clublog.org/cty/v1.0}call":
                _obj.call = attr.text
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}name":
                _obj.name = attr.text
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}prefix":
                _obj.prefix = attr.text
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}entity":
                if attr.text == "INVALID":
                    skip = True
                _obj.entity = attr.text
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}adif":
                _obj.adif = int(attr.text)
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}cqz":
                _obj.cqz = float(attr.text)
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}cont":
                _obj.cont = attr.text
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}long":
                _obj.long = float(attr.text)
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}lat":
                _obj.lat = float(attr.text)
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}start":
                _obj.start = parser.parse(attr.text)
            elif attr.tag == "{http://www.clublog.org/cty/v1.0}end":
                _obj.end = parser.parse(attr.text)  # was assigned to _obj.start, overwriting the start date
        if not _obj.adif:
            _obj.adif = 999
        elif not _obj.cqz:
            _obj.cqz = 999
        if skip:
            continue  # We have got an entity=INVALID, skip it
        db.session.add(_obj)
        elements += 1
    db.session.commit()
    if not silent:
        print("-- Committed {0} new elements".format(elements))
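# Download the eQSL inbox (ADIF) for every logbook that has an eQSL QTH nickname
# and mark the matching QSOs as eQSL-received.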
def cron_sync_from_eqsl(dry_run=False):
    """https://www.eqsl.cc/qslcard/DownloadInBox.txt

    TODO: to limit how much is downloaded, find the oldest QSO with eqsl_qsl_rcvd == 'N'
    and pass its timestamp to the query as RcvdSince=YYYYMMDDHHMM.
    """
    if dry_run:
        print("-- [DRY RUN] Fetching logs from eQSL")
    else:
        print("-- Fetching logs from eQSL")
    _logbooks = Logbook.query.filter(Logbook.eqsl_qth_nickname.isnot(None)).all()
    for logbook in _logbooks:
        if not logbook.user.eqsl_name or not logbook.user.eqsl_password:
            continue  # Skip logbooks whose user does not use eQSL
        config = Config.query.first()
        if not config:
            print("!!! Error: config not found")
            add_log(category="CONFIG", level="ERROR", message="Config not found")
            return
        print("-- Working on logbook [{0}] {1}".format(logbook.id, logbook.name))
        _payload = urllib.parse.urlencode(
            {
                "UserName": logbook.user.eqsl_name,
                "Password": logbook.user.eqsl_password,
                "QTHNickname": logbook.eqsl_qth_nickname,
            }
        )
        _url = "{0}?{1}".format(config.eqsl_download_url, _payload)
        _req = urllib.request.Request(_url)
        _text = None
        err_fetch = UserLogging()
        err_fetch.user_id = logbook.user.id
        err_fetch.logbook_id = logbook.id
        err_fetch.category = "EQSL FETCH"
        try:
            with urllib.request.urlopen(_req) as f:
                _text = f.read().decode("UTF-8")
        except urllib.error.URLError as e:
            err_fetch.level = "ERROR"
            err_fetch.message = "Error fetching from eQSL: {0}".format(e)
            db.session.add(err_fetch)
            db.session.commit()
            continue  # skip to the next logbook
        if not _text:
            err_fetch.level = "ERROR"
            err_fetch.message = "Error fetching from eQSL, _text undefined"
            db.session.add(err_fetch)
            db.session.commit()
            continue  # skip to the next logbook
        # Now get the download link from the returned page
        # <li><a href="downloadedfiles/xxx.adi">.ADI file</a>
        m = re.search('<A HREF="(.*)">.ADI file</A>', _text)
        if m:
            _file_path = m.group(1)
            _url = "{0}/{1}".format(os.path.dirname(config.eqsl_download_url), _file_path)
            _req = urllib.request.Request(_url)
            _text = None
            try:
                print("-- Fetching ADIF {0}".format(_url))
                with urllib.request.urlopen(_req) as f:
                    # eQSL returns a file encoded in ISO8859-1, so decode it then re-encode it as UTF-8
                    _text = f.read().decode("ISO8859-1").encode("UTF-8")
            except urllib.error.URLError as e:
                err_fetch.level = "ERROR"
                err_fetch.message = "Error fetching from eQSL: {0}".format(e)
                db.session.add(err_fetch)
                db.session.commit()
                continue  # skip to the next logbook
            if not _text:
                err_fetch.level = "ERROR"
                err_fetch.message = "Error fetching from eQSL, _text for final URL undefined"
                db.session.add(err_fetch)
                db.session.commit()
                continue  # skip to the next logbook
            adif = parse_adif(_text)
            for log in adif:
                err_log = UserLogging()
                err_log.user_id = logbook.user.id
                err_log.logbook_id = logbook.id
                err_log.category = "EQSL LOG"
                _date = "{0} {1}".format(log["qso_date"], log["time_on"])
                _date_first = datetime.datetime.strptime(_date + "00", "%Y%m%d %H%M%S")
                _date_second = datetime.datetime.strptime(_date + "59", "%Y%m%d %H%M%S")
                # Try to find a matching log entry
                qso = Log.query.filter(
                    Log.logbook_id == logbook.id,
                    Log.user_id == logbook.user.id,
                    Log.call == log["call"].upper(),
                    Log.time_on.between(_date_first, _date_second),
                ).first()
                if qso:
                    if qso.eqsl_qsl_rcvd == "Y":
                        continue  # this eQSL has already been matched
                    print("-- Matching log found for {0} on {1}: ID {2}".format(log["call"], _date, qso.id))
                    if not dry_run:
                        qso.eqsl_qsl_rcvd = "Y"
                        err_log.log_id = qso.id
                        err_log.level = "INFO"
                        err_log.message = "QSO from eQSL by {0} on {1} received and updated".format(log["call"], _date)
                else:
                    print("-- No matching log found for {0} on {1}".format(log["call"], _date))
                    err_log.level = "INFO"
                    err_log.message = "QSO from eQSL by {0} on {1} not found in database".format(log["call"], _date)
                if not dry_run:
                    db.session.add(err_log)
                    db.session.commit()
        else:
            err_fetch.level = "ERROR"
            err_fetch.message = "Error fetching from eQSL, link not found in body"
            db.session.add(err_fetch)
            db.session.commit()
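# Upload QSOs marked as requested (eqsl_qsl_sent == "R") to eQSL and record the result.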
def cron_sync_eqsl(dry_run=False):
    """https://www.eqsl.cc/qslcard/ImportADIF.txt"""
    if dry_run:
        print("--- [DRY RUN] Sending logs to eQSL when requested")
    else:
        print("--- Sending logs to eQSL when requested")
    logs = Log.query.filter(Log.eqsl_qsl_sent == "R").all()
    config = Config.query.first()
    if not config:
        print("!!! Error: config not found")
        add_log(category="CONFIG", level="ERROR", message="Config not found")
        return
    for log in logs:
        status = eqsl_upload_log(log, config, dry_run)
        if dry_run:
            continue
        err = UserLogging()
        err.user_id = log.user.id
        err.log_id = log.id
        err.logbook_id = log.logbook.id
        err.category = "EQSL"
        if status["state"] == "error":
            err.level = "ERROR"
            print("!! Error uploading QSO {0} to eQSL: {1}".format(log.id, status["message"]))
        elif status["state"] == "rejected":
            log.eqsl_qsl_sent = "I"
            print("!! Rejected uploading QSO {0} to eQSL: {1}".format(log.id, status["message"]))
        else:
            err.level = "INFO"
        err.message = status["message"] + "\r\n"
        if "msgs" in status:
            for i in status["msgs"]:
                print("!! {0}: {1}".format(i[0], i[1]))
                err.message += "{0}: {1}\r\n".format(i[0], i[1])
        if status["state"] == "success":
            log.eqsl_qsl_sent = "Y"
        print(status)
        db.session.add(err)
        db.session.commit()