9) and self.Counts.Current % 100 is 0:
+ self.log_update()
+ return EvernoteAPIStatus.UnchangedError if error_if_unchanged else EvernoteAPIStatus.Unchanged, self.note.id
+ if not self.Changed:
+ # i.e., the note deck has been changed but the tags and fields have not
+ self.Counts.Updated += 1
+ return EvernoteAPIStatus.UnchangedError if error_if_unchanged else EvernoteAPIStatus.Success, self.note.id
+ if not self.OriginalGuid:
+ flds = get_dict_from_list(self.BaseNote.items())
+ self.OriginalGuid = get_evernote_guid_from_anki_fields(flds)
+ db_title = get_evernote_title_from_guid(self.OriginalGuid)
+ self.check_titles_equal(db_title, self.FullTitle, self.Guid)
+ self.note.flush(intTime())
+ self.log_update(" > Flushing Note")
+ self.update_note_model()
+ self.Counts.Updated += 1
+ return EvernoteAPIStatus.Success, self.note.id
+
+ def check_titles_equal(self, old_title, new_title, new_guid, log_title='DB INFO UNEQUAL'):
+ do_log_title = False
+ try:
+ new_title = decode(new_title)
+ except Exception:
+ do_log_title = True
+ try:
+ old_title = decode(old_title)
+ except Exception:
+ do_log_title = True
+ guid_text = '' if self.OriginalGuid is None else ' ' + self.OriginalGuid + (
+ '' if new_guid == self.OriginalGuid else ' vs %s' % new_guid) + ':'
+ if do_log_title or new_title != old_title or (self.OriginalGuid and new_guid != self.OriginalGuid):
+ log_str = ' %s: %s%s' % (
+ '*' if do_log_title else ' ' + log_title, guid_text, ' ' + new_title + ' vs ' + old_title)
+ log_error(log_str, crosspost_to_default=False)
+ self.log_update(log_str)
+ return False
+ return True
+
+ @property
+ def Title(self):
+ """:rtype : EvernoteNoteTitle.EvernoteNoteTitle """
+ title = ""
+ if FIELDS.TITLE in self.Fields:
+ title = self.Fields[FIELDS.TITLE]
+ elif self.BaseNote:
+ title = self.originalFields[FIELDS.TITLE]
+ return EvernoteNoteTitle(title)
+
+ @property
+ def FullTitle(self): return self.Title.FullTitle
+
+ def save_anki_fields_decoded(self, attempt, from_anp_fields=False, do_decode=None):
+ title = self.db_title if hasattr(self, 'db_title') else self.FullTitle
+ e_return = False
+ log_header = 'ANKI-->ANP-->'
+ if from_anp_fields:
+ log_header += 'CREATE ANKI FIELDS'
+ base_values = self.Fields.items()
+ else:
+ log_header += 'SAVE ANKI FIELDS (DECODED)'
+ base_values = enumerate(self.note.fields)
+ for key, value in base_values:
+ name = key if from_anp_fields else FIELDS.LIST[key - 1] if key > 0 else FIELDS.EVERNOTE_GUID
+ if isinstance(value, unicode) and not do_decode is True:
+ action = 'ENCODING'
+ elif isinstance(value, str) and not do_decode is False:
+ action = 'DECODING'
+ else:
+ action = 'DOING NOTHING'
+ log('\t - %s for %s field %s' % (action, value.__class__.__name__, name), 'unicode', timestamp=False)
+ if action is not 'DOING NOTHING':
+ try:
+ new_value = encode(value) if action == 'ENCODED' else decode(value)
+ if from_anp_fields:
+ self.note[key] = new_value
+ else:
+ self.note.fields[key] = new_value
+ except (UnicodeDecodeError, UnicodeEncodeError, UnicodeTranslateError, UnicodeError, Exception) as e:
+ e_return = HandleUnicodeError(log_header, e, self.Guid, title, action, attempt, value, field=name)
+ if e_return is not 1:
+ raise
+ if e_return is not False:
+ log_blank('unicode')
+ return 1
+
+ def add_note_try(self, attempt=1):
+ title = self.db_title if hasattr(self, 'db_title') else self.FullTitle
+ col = self.Anki.collection()
+ log_header = 'ANKI-->ANP-->ADD NOTE FAILED'
+ action = 'DECODING?'
+ try:
+ col.addNote(self.note)
+ except (UnicodeDecodeError, UnicodeEncodeError, UnicodeTranslateError, UnicodeError, Exception), e:
+ e_return = HandleUnicodeError(log_header, e, self.Guid, title, action, attempt, self.note[FIELDS.TITLE])
+ if e_return is not 1:
+ raise
+ self.save_anki_fields_decoded(attempt + 1)
+ return self.add_note_try(attempt + 1)
+ return 1
+
+ def add_note(self):
+ self.create_note()
+ if self.note is None:
+ return EvernoteAPIStatus.NotFoundError, None
+ collection = self.Anki.collection()
+ db_title = get_evernote_title_from_guid(self.Guid)
+ log(' %s: ADD: ' % self.Guid + ' ' + self.FullTitle, self.__log_name)
+ self.check_titles_equal(db_title, self.FullTitle, self.Guid, 'NEW NOTE TITLE UNEQUAL TO DB ENTRY')
+ if self.add_note_try() is not 1:
+ return EvernoteAPIStatus.GenericError, None
+ collection.autosave()
+ self.Anki.start_editing()
+ return EvernoteAPIStatus.Success, self.note.id
+
+ def create_note(self, attempt=1):
+ id_deck = self.Anki.decks().id(self.deck())
+ if not self.ModelName:
+ self.ModelName = MODELS.DEFAULT
+ model = self.Anki.models().byName(self.ModelName)
+ col = self.Anki.collection()
+ self.note = AnkiNote(col, model)
+ self.note.model()['did'] = id_deck
+ self.note.tags = self.Tags
+ title = self.db_title if hasattr(self, 'db_title') else self.FullTitle
+ self.save_anki_fields_decoded(attempt, True, True)
diff --git a/anknotes/Controller.py b/anknotes/Controller.py
new file mode 100644
index 0000000..3b74e10
--- /dev/null
+++ b/anknotes/Controller.py
@@ -0,0 +1,223 @@
+# -*- coding: utf-8 -*-
+### Python Imports
+import socket
+from datetime import datetime
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+
+### Anknotes Shared Imports
+from anknotes.shared import *
+from anknotes.error import *
+
+### Anknotes Class Imports
+from anknotes.AnkiNotePrototype import AnkiNotePrototype
+from anknotes.EvernoteNotePrototype import EvernoteNotePrototype
+from anknotes.EvernoteNoteTitle import generateTOCTitle
+from anknotes import stopwatch
+### Anknotes Main Imports
+from anknotes.Anki import Anki
+from anknotes.ankEvernote import Evernote
+from anknotes.EvernoteNotes import EvernoteNotes
+from anknotes.EvernoteNoteFetcher import EvernoteNoteFetcher
+from anknotes import settings
+from anknotes.EvernoteImporter import EvernoteImporter
+
+### Evernote Imports
+from anknotes.evernote.edam.notestore.ttypes import NoteFilter, NotesMetadataResultSpec
+from anknotes.evernote.edam.type.ttypes import NoteSortOrder, Note as EvernoteNote
+from anknotes.evernote.edam.error.ttypes import EDAMSystemException
+
+### Anki Imports
+from aqt import mw
+
+
+# load_time = datetime.now()
+# log("Loaded controller at " + load_time.isoformat(), 'import')
class Controller:
    """Top-level coordinator wiring the Anki wrapper, the Evernote wrapper,
    and the EvernoteImporter together."""
    # Lazily created on first proceed() call.
    evernoteImporter = None
    """:type : EvernoteImporter"""
+
    def __init__(self):
        """Set up the Anki interface, initialize the anknotes DB, register the
        Evernote note models, and create the Evernote API wrapper.

        Statement order matters: ancillary files and the DB must exist before
        the models are added.
        """
        self.forceAutoPage = False
        self.auto_page_callback = None
        self.anki = Anki()
        self.anki.deck = SETTINGS.ANKI.DECKS.BASE.fetch()
        self.anki.setup_ancillary_files()
        ankDB().Init()
        self.anki.add_evernote_models()
        self.evernote = Evernote()
+
+ def test_anki(self, title, evernote_guid, filename=""):
+ if not filename:
+ filename = title
+ fields = {
+ FIELDS.TITLE: title,
+ FIELDS.CONTENT: file(
+ os.path.join(FOLDERS.LOGS, filename.replace('.enex', '') + ".enex"),
+ 'r').read(), FIELDS.EVERNOTE_GUID: FIELDS.EVERNOTE_GUID_PREFIX + evernote_guid
+ }
+ tags = ['NoTags', 'NoTagsToRemove']
+ return AnkiNotePrototype(self.anki, fields, tags)
+
+ def process_unadded_see_also_notes(self):
+ update_regex()
+ anki_note_ids = self.anki.get_anknotes_note_ids_with_unadded_see_also()
+ self.evernote.getNoteCount = 0
+ self.anki.process_see_also_content(anki_note_ids)
+
    def upload_validated_notes(self, automated=False):
        """Upload every queue entry with validation_status = 1 to Evernote,
        mirror the results into Anki, and prune successful entries from the
        validation queue.

        :param automated: True when invoked from a retry timer rather than the user
        :return: (final EvernoteAPIStatus, processed count, 0)
        """
        db = ankDB(TABLES.NOTE_VALIDATION_QUEUE)
        dbRows = db.all("validation_status = 1")
        notes_created, notes_updated, queries1, queries2 = ([] for i in range(4))
        """
        :type: (list[EvernoteNote], list[EvernoteNote], list[str], list[str])
        """
        noteFetcher = EvernoteNoteFetcher()
        tmr = stopwatch.Timer(len(dbRows), 25, infoStr="Upload of Validated Evernote Notes", automated=automated,
                              enabled=EVERNOTE.UPLOAD.ENABLED, max_allowed=EVERNOTE.UPLOAD.MAX,
                              label='Validation\\upload_validated_notes\\', display_initial_info=True)
        if tmr.actionInitializationFailed:
            return tmr.status, 0, 0
        for dbRow in dbRows:
            entry = EvernoteValidationEntry(dbRow)
            evernote_guid, rootTitle, contents, tagNames, notebookGuid, noteType = entry.items()
            tagNames = tagNames.split(',')
            if not tmr.checkLimits():
                break  # upload cap reached
            # makeNote performs the actual API call; autoStep updates counters/logs.
            whole_note = tmr.autoStep(
                self.evernote.makeNote(rootTitle, contents, tagNames, notebookGuid, guid=evernote_guid,
                                       noteType=noteType, validated=True), rootTitle, evernote_guid)
            if tmr.report_result is False:
                raise ValueError
            if tmr.status.IsDelayableError:
                break  # rate-limit/socket error: rescheduled via timer below
            if not tmr.status.IsSuccess:
                continue
            if not whole_note.tagNames:
                whole_note.tagNames = tagNames
            noteFetcher.addNoteFromServerToDB(whole_note, tagNames)
            note = EvernoteNotePrototype(whole_note=whole_note)
            assert whole_note.tagNames
            assert note.Tags
            if evernote_guid:
                # Existing note: queue deletion from the validation queue by guid.
                notes_updated.append(note)
                queries1.append([evernote_guid])
            else:
                # Newly created note: queue deletion by (title, contents).
                notes_created.append(note)
                queries2.append([rootTitle, contents])
        else:
            tmr.reportNoBreak()
        tmr.Report(self.anki.add_evernote_notes(notes_created) if tmr.counts.created else 0,
                   self.anki.update_evernote_notes(notes_updated) if tmr.counts.updated else 0)
        if tmr.counts.created.completed.subcount:
            db.executemany("DELETE FROM {t} WHERE title = ? and contents = ? ", queries2)
        if tmr.counts.updated.completed.subcount:
            db.executemany("DELETE FROM {t} WHERE guid = ? ", queries1)
        if tmr.is_success:
            db.commit()
        if tmr.should_retry:
            create_timer(30 if tmr.status.IsDelayableError else EVERNOTE.UPLOAD.RESTART_INTERVAL,
                         self.upload_validated_notes, True)
        return tmr.status, tmr.count, 0
+
    def create_toc_auto(self):
        """Auto-generate Table-of-Contents notes for every non-custom root
        title, creating new TOC notes and updating changed ones on Evernote.

        :return: (final EvernoteAPIStatus, processed count, skipped count)
        """
        db = ankDB()

        def check_old_values():
            # Closure over rootTitle/contents/tmr/log, which are (re)bound in
            # the loop below before each call -- do not call earlier.
            # Returns (guid, contents) for an update, (None, contents) for a
            # new note, or (None, None) when the note is unchanged.
            old_values = db.first("UPPER(title) = UPPER(?) AND tagNames LIKE '{t_tauto}'",
                                  rootTitle, columns='guid, content')
            if not old_values:
                log.go(rootTitle, 'Add')
                return None, contents
            evernote_guid, old_content = old_values
            noteBodyUnencoded = self.evernote.makeNoteBody(contents, encode=False)
            if type(old_content) != type(noteBodyUnencoded):
                log.go([rootTitle, type(old_content), type(noteBodyUnencoded)], 'Update\\Diffs\\_')
                raise UnicodeWarning
            # Normalize both sides before comparing: resolve the pending-guid
            # placeholder and unify quote style.
            old_content = old_content.replace('guid-pending', evernote_guid).replace("'", '"')
            noteBodyUnencoded = noteBodyUnencoded.replace('guid-pending', evernote_guid).replace("'", '"')
            if old_content == noteBodyUnencoded:
                log.go(rootTitle, 'Skipped')
                tmr.reportSkipped()
                return None, None
            log.go(noteBodyUnencoded, 'Update\\New\\' + rootTitle, clear=True)
            log.go(generate_diff(old_content, noteBodyUnencoded), 'Update\\Diffs\\' + rootTitle, clear=True)
            return evernote_guid, contents.replace(
                '/guid-pending/', '/%s/' % evernote_guid).replace('/guid-pending/', '/%s/' % evernote_guid)

        update_regex()
        noteType = 'create-toc_auto_notes'
        # Clear any stale validation-queue entries from a previous run.
        db.delete("noteType = '%s'" % noteType, table=TABLES.NOTE_VALIDATION_QUEUE)
        NotesDB = EvernoteNotes()
        NotesDB.baseQuery = ANKNOTES.HIERARCHY.ROOT_TITLES_BASE_QUERY
        dbRows = NotesDB.populateAllNonCustomRootNotes()
        notes_created, notes_updated = [], []
        """
        :type: (list[EvernoteNote], list[EvernoteNote])
        """
        info = stopwatch.ActionInfo('Creation of Table of Content Note(s)', row_source='Root Title(s)')
        log = Logger('See Also\\2-%s\\' % noteType, rm_path=True)
        tmr = stopwatch.Timer(len(dbRows), 25, info, max_allowed=EVERNOTE.UPLOAD.MAX,
                              label=log.base_path)
        if tmr.actionInitializationFailed:
            return tmr.status, 0, 0
        for dbRow in dbRows:
            rootTitle, contents, tagNames, notebookGuid = dbRow.items()
            # TOC tag set: existing tags + TOC markers (+ sandbox marker when
            # sandboxed), minus the reversibility markers.
            tagNames = (set(tagNames[1:-1].split(',')) | {TAGS.TOC, TAGS.TOC_AUTO} | (
                {"#Sandbox"} if EVERNOTE.API.IS_SANDBOXED else set())) - {TAGS.REVERSIBLE, TAGS.REVERSE_ONLY}
            rootTitle = generateTOCTitle(rootTitle)
            evernote_guid, contents = check_old_values()
            if contents is None:
                continue  # unchanged -- already counted as skipped
            if not tmr.checkLimits():
                break  # upload cap reached
            if not EVERNOTE.UPLOAD.ENABLED:
                tmr.reportStatus(EvernoteAPIStatus.Disabled, title=rootTitle)
                continue
            whole_note = tmr.autoStep(
                self.evernote.makeNote(rootTitle, contents, tagNames, notebookGuid, noteType=noteType,
                                      guid=evernote_guid), rootTitle, evernote_guid)
            if tmr.report_result is False:
                raise ValueError
            if tmr.status.IsDelayableError:
                break
            if not tmr.status.IsSuccess:
                continue
            (notes_updated if evernote_guid else notes_created).append(EvernoteNotePrototype(whole_note=whole_note))
        tmr.Report(self.anki.add_evernote_notes(notes_created) if tmr.counts.created.completed else 0,
                   self.anki.update_evernote_notes(notes_updated) if tmr.counts.updated.completed else 0)
        if tmr.counts.queued:
            db.commit()
        return tmr.status, tmr.count, tmr.counts.skipped.val
+
    def update_ancillary_data(self):
        """Delegate refresh of notebook/tag metadata to the Evernote wrapper."""
        self.evernote.update_ancillary_data()
+
+ def proceed(self, auto_paging=False):
+ if not self.evernoteImporter:
+ self.evernoteImporter = EvernoteImporter()
+ self.evernoteImporter.anki = self.anki
+ self.evernoteImporter.evernote = self.evernote
+ self.evernoteImporter.forceAutoPage = self.forceAutoPage
+ self.evernoteImporter.auto_page_callback = self.auto_page_callback
+ if not hasattr(self, 'currentPage'):
+ self.currentPage = 1
+ self.evernoteImporter.currentPage = self.currentPage
+ if hasattr(self, 'ManualGUIDs'):
+ self.evernoteImporter.ManualGUIDs = self.ManualGUIDs
+ self.evernoteImporter.proceed(auto_paging)
+
+ def resync_with_local_db(self):
+ log_banner('Resync With Local DB', clear=False, append_newline=False, prepend_newline=True)
+ evernote_guids = get_all_local_db_guids()
+ tmr = stopwatch.Timer(evernote_guids, strInfo='Resync Notes From Local DB', label='resync_with_local_db\\')
+ results = self.evernote.create_evernote_notes(evernote_guids, use_local_db_only=True)
+ """:type: EvernoteNoteFetcherResults"""
+ log(' > Finished Creating Evernote Notes: '.ljust(40) + tmr.str_long)
+ tmr.reset()
+ number = self.anki.update_evernote_notes(results.Notes, log_update_if_unchanged=False)
+ log(' > Finished Updating Anki Notes: '.ljust(40) + tmr.str_long)
+ tooltip = '%d Evernote Notes Created
%d Anki Notes Successfully Updated' % (results.Local, number)
+ show_report(' > Resync with Local DB Complete', tooltip)
diff --git a/anknotes/EvernoteImporter.py b/anknotes/EvernoteImporter.py
new file mode 100644
index 0000000..86c88eb
--- /dev/null
+++ b/anknotes/EvernoteImporter.py
@@ -0,0 +1,318 @@
+# -*- coding: utf-8 -*-
+### Python Imports
+import socket
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+
+### Anknotes Shared Imports
+from anknotes.shared import *
+from anknotes.error import *
+
+### Anknotes Class Imports
+from anknotes.AnkiNotePrototype import AnkiNotePrototype
+from anknotes.structs_base import UpdateExistingNotes
+
+### Anknotes Main Imports
+from anknotes.Anki import Anki
+from anknotes.ankEvernote import Evernote
+from anknotes.EvernoteNotes import EvernoteNotes
+from anknotes.EvernoteNotePrototype import EvernoteNotePrototype
+
+try:
+ from anknotes import settings
+except Exception:
+ pass
+
+### Evernote Imports
+from anknotes.evernote.edam.notestore.ttypes import NoteFilter, NotesMetadataResultSpec, NoteMetadata, NotesMetadataList
+from anknotes.evernote.edam.type.ttypes import NoteSortOrder, Note as EvernoteNote
+from anknotes.evernote.edam.error.ttypes import EDAMSystemException
+
+### Anki Imports
+try:
+ from aqt import mw
+except Exception:
+ pass
+
+
class EvernoteImporter:
    """Drives the import of Evernote notes into Anki, including metadata
    search, per-note fetching, and auto-paging across result pages."""
    forceAutoPage = False
    auto_page_callback = None
    """:type : lambda"""
    anki = None
    """:type : Anki"""
    evernote = None
    """:type : Evernote"""
    # How notes that already exist in Anki are refreshed (in place vs re-add).
    updateExistingNotes = UpdateExistingNotes.UpdateNotesInPlace
    # Optional explicit GUID list; when set, server metadata search is skipped.
    ManualGUIDs = None
+
+ @property
+ def ManualMetadataMode(self):
+ return (self.ManualGUIDs is not None and len(self.ManualGUIDs) > 0)
+
    def __init(self):
        # NOTE(review): the name looks like a typo for __init__ -- as written
        # this is never invoked automatically on instantiation, so instances
        # keep the class-level defaults. Confirm intent before renaming, since
        # behavior (settings fetch at construction) would change.
        self.updateExistingNotes = SETTINGS.ANKI.UPDATE_EXISTING_NOTES.fetch(UpdateExistingNotes.UpdateNotesInPlace)
        self.ManualGUIDs = None
+
    def override_evernote_metadata(self):
        """Build a synthetic metadata result from self.ManualGUIDs instead of
        querying the Evernote servers (manual-metadata mode).

        :return: True (always succeeds; no network round-trip is made)
        """
        guids = self.ManualGUIDs
        self.MetadataProgress = EvernoteMetadataProgress(self.currentPage)
        self.MetadataProgress.Total = len(guids)
        # Page size: whatever remains after the offset, capped by the query limit.
        self.MetadataProgress.Current = min(self.MetadataProgress.Total - self.MetadataProgress.Offset,
                                            EVERNOTE.IMPORT.QUERY_LIMIT)
        result = NotesMetadataList()
        result.totalNotes = len(guids)
        result.updateCount = -1  # unknown without a server query
        result.startIndex = self.MetadataProgress.Offset
        result.notes = []
        """:type : list[NoteMetadata]"""
        for i in range(self.MetadataProgress.Offset, self.MetadataProgress.Completed):
            result.notes.append(NoteMetadata(guids[i]))
        self.MetadataProgress.loadResults(result)
        self.evernote.metadata = self.MetadataProgress.NotesMetadata
        return True
+
    def get_evernote_metadata(self):
        """Query the Evernote servers for note metadata matching the configured
        search query, at the current pagination offset.

        Results are loaded into self.MetadataProgress and mirrored onto
        self.evernote.metadata.

        :returns: True on success; False on failure (the failure status is
            recorded on self.MetadataProgress.Status)
        :rtype: bool
        """
        query = settings.generate_evernote_query()
        evernote_filter = NoteFilter(words=query, ascending=True, order=NoteSortOrder.UPDATED)
        self.MetadataProgress = EvernoteMetadataProgress(self.currentPage)
        # Only USN, tag GUIDs and notebook GUID are needed for sync decisions.
        spec = NotesMetadataResultSpec(includeTitle=False, includeUpdated=False, includeUpdateSequenceNum=True,
                                       includeTagGuids=True, includeNotebookGuid=True)
        notestore_status = self.evernote.initialize_note_store()
        if not notestore_status.IsSuccess:
            self.MetadataProgress.Status = notestore_status
            return False  # notestore_status
        api_action_str = u'trying to search for note metadata'
        log_api("findNotesMetadata", "[Offset: %3d]: Query: '%s'" % (self.MetadataProgress.Offset, query))
        try:
            result = self.evernote.noteStore.findNotesMetadata(self.evernote.token, evernote_filter,
                                                               self.MetadataProgress.Offset,
                                                               EVERNOTE.IMPORT.METADATA_RESULTS_LIMIT, spec)
            """
            :type: NotesMetadataList
            """
        except EDAMSystemException as e:
            if not HandleEDAMRateLimitError(e, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
                raise
            self.MetadataProgress.Status = EvernoteAPIStatus.RateLimitError
            return False
        except socket.error as v:
            if not HandleSocketError(v, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
                raise
            self.MetadataProgress.Status = EvernoteAPIStatus.SocketError
            return False
        self.MetadataProgress.loadResults(result)
        self.evernote.metadata = self.MetadataProgress.NotesMetadata
        log(self.MetadataProgress.Summary, line_padding_header="- Metadata Results: ",
            line_padding=ANKNOTES.FORMATTING.LINE_PADDING_HEADER, timestamp=False)
        return True
+
+ def update_in_anki(self, evernote_guids):
+ """
+ :rtype : EvernoteNoteFetcherResults
+ """
+ Results = self.evernote.create_evernote_notes(evernote_guids)
+ if self.ManualMetadataMode:
+ self.evernote.check_notebooks_up_to_date()
+ self.anki.notebook_data = self.evernote.notebook_data
+ Results.Imported = self.anki.update_evernote_notes(Results.Notes)
+ return Results
+
+ def import_into_anki(self, evernote_guids):
+ """
+ :rtype : EvernoteNoteFetcherResults
+ """
+ Results = self.evernote.create_evernote_notes(evernote_guids)
+ if self.ManualMetadataMode:
+ self.evernote.check_notebooks_up_to_date()
+ self.anki.notebook_data = self.evernote.notebook_data
+ Results.Imported = self.anki.add_evernote_notes(Results.Notes)
+ return Results
+
+ def check_note_sync_status(self, evernote_guids):
+ """
+ Check for already existing, up-to-date, local db entries by Evernote GUID
+ :param evernote_guids: List of GUIDs
+ :return: List of Already Existing Evernote GUIDs
+ :rtype: list[str]
+ """
+ notes_already_up_to_date = []
+ db = ankDB()
+ for evernote_guid in evernote_guids:
+ db_usn = db.scalar({'guid': evernote_guid}, columns='updateSequenceNum')
+ if not self.evernote.metadata[evernote_guid].updateSequenceNum:
+ server_usn = 'N/A'
+ else:
+ server_usn = self.evernote.metadata[evernote_guid].updateSequenceNum
+ if evernote_guid in self.anki.usns:
+ current_usn = self.anki.usns[evernote_guid]
+ if current_usn == str(server_usn):
+ log_info = None # 'ANKI NOTE UP-TO-DATE'
+ notes_already_up_to_date.append(evernote_guid)
+ elif str(db_usn) == str(server_usn):
+ log_info = 'DATABASE ENTRY UP-TO-DATE'
+ else:
+ log_info = 'NO COPIES UP-TO-DATE'
+ else:
+ current_usn = 'N/A'
+ log_info = 'NO ANKI USN EXISTS'
+ if log_info:
+ if not self.evernote.metadata[evernote_guid].updateSequenceNum:
+ log_info += ' (Unable to find Evernote Metadata) '
+ log(" > USN check for note '%s': %s: db/current/server = %s,%s,%s" % (
+ evernote_guid, log_info, str(db_usn), str(current_usn), str(server_usn)), 'usn')
+ return notes_already_up_to_date
+
    def proceed(self, auto_paging=False):
        """Run the full import pipeline: banner/connectivity check, metadata
        search, note import, then auto-paging continuation."""
        self.proceed_start(auto_paging)
        self.proceed_find_metadata(auto_paging)
        self.proceed_import_notes()
        self.proceed_autopage()
+
+ def proceed_start(self, auto_paging=False):
+ col = self.anki.collection()
+ lastImport = SETTINGS.EVERNOTE.LAST_IMPORT.fetch()
+ SETTINGS.EVERNOTE.LAST_IMPORT.save(datetime.now().strftime(ANKNOTES.DATE_FORMAT))
+ lastImportStr = get_friendly_interval_string(lastImport)
+ if lastImportStr:
+ lastImportStr = ' [LAST IMPORT: %s]' % lastImportStr
+ log_str = " > Starting Evernote Import: Page %3s Query: %s" % (
+ '#' + str(self.currentPage), settings.generate_evernote_query())
+ log_banner(log_str.ljust(ANKNOTES.FORMATTING.TEXT_LENGTH-len(lastImportStr)) + lastImportStr, append_newline=False,
+ chr='=', length=0, center=False, clear=False, timestamp=True)
+ if auto_paging:
+ return True
+ notestore_status = self.evernote.initialize_note_store()
+ if not notestore_status == EvernoteAPIStatus.Success:
+ log(" > Note store does not exist. Aborting.")
+ show_tooltip("Could not connect to Evernote servers (Status Code: %s)... Aborting." % notestore_status.name)
+ return False
+ self.evernote.getNoteCount = 0
+ return True
+
+ def proceed_find_metadata(self, auto_paging=False):
+ global latestEDAMRateLimit, latestSocketError
+
+ if self.ManualMetadataMode:
+ self.override_evernote_metadata()
+ else:
+ self.get_evernote_metadata()
+
+ if self.MetadataProgress.Status == EvernoteAPIStatus.RateLimitError:
+ m, s = divmod(latestEDAMRateLimit, 60)
+ show_report(" > Error: Delaying Operation",
+ "Over the rate limit when searching for Evernote metadata
Evernote requested we wait %d:%02d min" % (
+ m, s), delay=5)
+ create_timer(latestEDAMRateLimit + 10, self.proceed, auto_paging)
+ return False
+ elif self.MetadataProgress.Status == EvernoteAPIStatus.SocketError:
+ show_report(" > Error: Delaying Operation:",
+ "%s when searching for Evernote metadata" %
+ latestSocketError['friendly_error_msg'], "We will try again in 30 seconds", delay=5)
+ create_timer(30, self.proceed, auto_paging)
+ return False
+
+ self.ImportProgress = EvernoteImportProgress(self.anki, self.MetadataProgress)
+ self.ImportProgress.loadAlreadyUpdated(
+ [] if self.ManualMetadataMode else self.check_note_sync_status(
+ self.ImportProgress.GUIDs.Server.Existing.All))
+ log(self.ImportProgress.Summary + "\n", line_padding_header="- Note Sync Status: ",
+ line_padding=ANKNOTES.FORMATTING.LINE_PADDING_HEADER, timestamp=False)
+
+ def proceed_import_notes(self):
+ self.anki.start_editing()
+ self.ImportProgress.processResults(self.import_into_anki(self.ImportProgress.GUIDs.Server.New))
+ if self.updateExistingNotes == UpdateExistingNotes.UpdateNotesInPlace:
+ self.ImportProgress.processUpdateInPlaceResults(
+ self.update_in_anki(self.ImportProgress.GUIDs.Server.Existing.OutOfDate))
+ elif self.updateExistingNotes == UpdateExistingNotes.DeleteAndReAddNotes:
+ self.anki.delete_anki_cards(self.ImportProgress.GUIDs.Server.Existing.OutOfDate)
+ self.ImportProgress.processDeleteAndUpdateResults(
+ self.import_into_anki(self.ImportProgress.GUIDs.Server.Existing.OutOfDate))
+ show_report(" > Import Complete", self.ImportProgress.ResultsSummaryLines)
+ self.anki.stop_editing()
+ self.anki.collection().autosave()
+
+ def save_current_page(self):
+ if self.forceAutoPage:
+ return
+ SETTINGS.EVERNOTE.PAGINATION_CURRENT_PAGE.save(self.currentPage)
+
+ def proceed_autopage(self):
+ if not self.autoPagingEnabled:
+ return
+ global latestEDAMRateLimit, latestSocketError
+ status = self.ImportProgress.Status
+ restart = 0
+ if status == EvernoteAPIStatus.RateLimitError:
+ m, s = divmod(latestEDAMRateLimit, 60)
+ show_report(" > Error: Delaying Auto Paging",
+ "Over the rate limit when getting Evernote notes
Evernote requested we wait %d:%02d min" % (
+ m, s), delay=5)
+ create_timer(latestEDAMRateLimit + 10, self.proceed, True)
+ return False
+ if status == EvernoteAPIStatus.SocketError:
+ show_report(" > Error: Delaying Auto Paging:",
+ "%s when getting Evernote notes" % latestSocketError[
+ 'friendly_error_msg'],
+ "We will try again in 30 seconds", delay=5)
+ create_timer(30, self.proceed, True)
+ return False
+ if self.MetadataProgress.IsFinished:
+ self.currentPage = 1
+ if self.forceAutoPage:
+ show_report(" > Terminating Auto Paging",
+ "All %d notes have been processed and forceAutoPage is True" % self.MetadataProgress.Total,
+ delay=5)
+ if self.auto_page_callback:
+ self.auto_page_callback()
+ return True
+ elif EVERNOTE.IMPORT.PAGING.RESTART.ENABLED:
+ restart = max(EVERNOTE.IMPORT.PAGING.RESTART.INTERVAL, 60 * 15)
+ restart_title = " > Restarting Auto Paging"
+ restart_msg = "All %d notes have been processed and EVERNOTE.IMPORT.PAGING.RESTART.ENABLED is True
" % \
+ self.MetadataProgress.Total
+ suffix = "Per EVERNOTE.IMPORT.PAGING.RESTART.INTERVAL, "
+ else:
+ show_report(" > Completed Auto Paging",
+ "All %d notes have been processed and EVERNOTE.IMPORT.PAGING.RESTART.ENABLED is False" %
+ self.MetadataProgress.Total, delay=5)
+ self.save_current_page()
+ return True
+ else: # Paging still in progress (else to )
+ self.currentPage = self.MetadataProgress.Page + 1
+ restart_title = " > Continuing Auto Paging"
+ restart_msg = "Page %d completed
%d notes remain over %d page%s
%d of %d notes have been processed" % (
+ self.MetadataProgress.Page, self.MetadataProgress.Remaining, self.MetadataProgress.RemainingPages,
+ 's' if self.MetadataProgress.RemainingPages > 1 else '', self.MetadataProgress.Completed,
+ self.MetadataProgress.Total)
+ restart = -1 * max(30, EVERNOTE.IMPORT.PAGING.RESTART.INTERVAL_OVERRIDE)
+ if self.forceAutoPage:
+ suffix = "
Only delaying {interval} as the forceAutoPage flag is set"
+ elif self.ImportProgress.APICallCount < EVERNOTE.IMPORT.PAGING.RESTART.DELAY_MINIMUM_API_CALLS:
+ suffix = "
Only delaying {interval} as the API Call Count of %d is less than the minimum of %d set by EVERNOTE.IMPORT.PAGING.RESTART.DELAY_MINIMUM_API_CALLS" % (
+ self.ImportProgress.APICallCount, EVERNOTE.IMPORT.PAGING.RESTART.DELAY_MINIMUM_API_CALLS)
+ else:
+ restart = max(EVERNOTE.IMPORT.PAGING.INTERVAL_SANDBOX, 60 * 5) if EVERNOTE.API.IS_SANDBOXED else max(
+ EVERNOTE.IMPORT.PAGING.INTERVAL, 60 * 10)
+ suffix = "
Delaying Auto Paging: Per EVERNOTE.IMPORT.PAGING.INTERVAL, "
+ self.save_current_page()
+ if restart > 0:
+ suffix += "will delay for {interval} before continuing"
+ m, s = divmod(abs(restart), 60)
+ suffix = suffix.format(interval=['%2ds' % s, '%d:%02d min' % (m, s)][m > 0])
+ show_report(restart_title, (restart_msg + suffix).split('
'), delay=5)
+ if restart:
+ create_timer(restart, self.proceed, True)
+ return self.proceed(True)
+
+ @property
+ def autoPagingEnabled(self):
+ return SETTINGS.EVERNOTE.AUTO_PAGING.fetch() or self.forceAutoPage
diff --git a/anknotes/EvernoteNoteFetcher.py b/anknotes/EvernoteNoteFetcher.py
new file mode 100644
index 0000000..aacb9f4
--- /dev/null
+++ b/anknotes/EvernoteNoteFetcher.py
@@ -0,0 +1,178 @@
+### Python Imports
+import socket
+
+### Anknotes Shared Imports
+from anknotes.base import decode
+from anknotes.shared import *
+from anknotes.EvernoteNotePrototype import EvernoteNotePrototype
+from anknotes.error import *
+
+### Evernote Imports
+from evernote.edam.error.ttypes import EDAMSystemException
+
+
class EvernoteNoteFetcher(object):
    def __init__(self, evernote=None, guid=None, use_local_db_only=False):
        """Set up fetch state; when *guid* is given, fetch it immediately.

        :type evernote: ankEvernote.Evernote
        """
        self.__reset_data()
        self.results = EvernoteNoteFetcherResults()
        self.result = EvernoteNoteFetcherResult()
        self.api_calls = 0
        self.keepEvernoteTags = True
        self.deleteQueryTags = True
        self.evernoteQueryTags = []
        self.tagsToDelete = []
        self.use_local_db_only = use_local_db_only
        self.__update_sequence_number = -1
        self.evernote = evernote if evernote else None
        if not guid:
            self.guid = ""
            return
        self.guid = guid
        if evernote and not self.use_local_db_only:
            # Remember the server USN so local-DB hits can be validated.
            self.__update_sequence_number = self.evernote.metadata[self.guid].updateSequenceNum
        self.getNote()
+
+ def __reset_data(self):
+ self.tagNames = []
+ self.tagGuids = []
+ self.whole_note = None
+
+ @property
+ def UpdateSequenceNum(self):
+ if self.result.Note:
+ return self.result.Note.UpdateSequenceNum
+ return self.__update_sequence_number
+
    def reportSuccess(self, note, source=None):
        """Record *note* as a successfully fetched result."""
        self.reportResult(EvernoteAPIStatus.Success, note, source)
+
+ def reportResult(self, status=None, note=None, source=None):
+ if note:
+ self.result.Note = note
+ status = EvernoteAPIStatus.Success
+ if not source:
+ source = 2
+ if status:
+ self.result.Status = status
+ if source:
+ self.result.Source = source
+ self.results.reportResult(self.result)
+
    def getNoteLocal(self):
        """Try to satisfy the fetch from the local Anknotes DB.

        Matches on GUID and, when a server USN is known, on USN too, so a
        stale local copy is not treated as a hit.

        :return: True when the note was found locally, False otherwise
        """
        # Check Anknotes database for note
        query = "guid = '%s'" % self.guid
        if self.UpdateSequenceNum > -1:
            query += " AND `updateSequenceNum` = %d" % self.UpdateSequenceNum
        db_note = ankDB().first(query)
        """:type : sqlite.Row"""
        if not db_note:
            return False
        if not self.use_local_db_only:
            log(' ' + '-' * 14 + ' ' * 5 + "> getNoteLocal: %s" % db_note['title'], 'api')
        assert db_note['guid'] == self.guid
        # Source 1 = local database hit.
        self.reportSuccess(EvernoteNotePrototype(db_note=db_note), 1)
        self.setNoteTags(tag_names=self.result.Note.TagNames, tag_guids=self.result.Note.TagGuids)
        return True
+
+ def setNoteTags(self, tag_names=None, tag_guids=None):
+ if not self.keepEvernoteTags:
+ self.tagGuids, self.tagNames = [], []; return
+ # if not tag_names:
+ # if self.tagNames: tag_names = self.tagNames
+ # if not tag_names and self.result.Note: tag_names = self.result.Note.TagNames
+ # if not tag_names and self.whole_note: tag_names = self.whole_note.tagNames
+ # if not tag_names: tag_names = None
+ if not tag_guids:
+ tag_guids = self.tagGuids if self.tagGuids else (
+ self.result.Note.TagGuids if self.result.Note else (self.whole_note.tagGuids if self.whole_note else None))
+ if not tag_names:
+ tag_names = self.tagNames if self.tagNames else (
+ self.result.Note.TagNames if self.result.Note else (self.whole_note.tagNames if self.whole_note else None))
+ if not self.evernote or self.result.Source is 1:
+ self.tagGuids, self.tagNames = tag_guids, tag_names; return
+ self.tagGuids, self.tagNames = self.evernote.get_matching_tag_data(tag_guids, tag_names)
+
    def addNoteFromServerToDB(self, whole_note=None, tag_names=None):
        """
        Adds note to Anknote DB from an Evernote Note object provided by the Evernote API
        :type whole_note : evernote.edam.type.ttypes.Note
        :type tag_names : list[str] | None
        """
        if whole_note:
            self.whole_note = whole_note
        if tag_names:
            self.tagNames = tag_names
        log('Adding %s: %s' % (self.whole_note.guid, self.whole_note.title), 'ankDB')
        if not self.tagGuids:
            self.tagGuids = self.whole_note.tagGuids
        # Scalar columns come straight off the server note; the tag columns
        # come from this fetcher's (possibly filtered) state.
        auto_columns = ['guid', 'title', 'content', 'updated', 'created', 'updateSequenceNum', 'notebookGuid']
        columns = {key: getattr(self.whole_note, key) for key in auto_columns}
        columns.update({key: getattr(self, key) for key in ['tagNames', 'tagGuids']})
        for key, value in columns.items():
            if isinstance(value, list):
                # Lists are stored comma-wrapped (",a,b,"), enabling LIKE matches.
                columns[key] = u',' + u','.join(map(decode, value)) + u','
            elif isinstance(value, str):
                columns[key] = decode(value)
        db = ankDB()
        # Current snapshot plus an append-only history row.
        db.insert_or_replace(columns)
        db.insert(columns, table=TABLES.EVERNOTE.NOTES_HISTORY)
        db.commit()
+
    def getNoteRemoteAPICall(self):
        """Perform the noteStore.getNote API call for self.guid.

        On success the raw server note is stored on self.whole_note and True
        is returned; on failure the error status is recorded via reportResult
        and False is returned.
        """
        notestore_status = self.evernote.initialize_note_store()
        if not notestore_status.IsSuccess:
            self.reportResult(notestore_status)
            return False
        api_action_str = u'trying to retrieve a note. We will save the notes downloaded thus far.'
        self.api_calls += 1
        log_api(" > getNote [%3d]" % self.api_calls, self.guid)
        try:
            # withContent=True; no resources, recognition, or alternate data.
            self.whole_note = self.evernote.noteStore.getNote(self.evernote.token, self.guid, True, False,
                                                              False, False)
            """:type : evernote.edam.type.ttypes.Note"""
        except EDAMSystemException as e:
            if not HandleEDAMRateLimitError(e, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
                raise
            self.reportResult(EvernoteAPIStatus.RateLimitError)
            return False
        except socket.error as v:
            if not HandleSocketError(v, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
                raise
            self.reportResult(EvernoteAPIStatus.SocketError)
            return False
        assert self.whole_note.guid == self.guid
        return True
+
+ def getNoteRemote(self):
+ if self.api_calls > EVERNOTE.IMPORT.API_CALLS_LIMIT > -1:
+ log(
+ "Aborting Evernote.getNoteRemote: EVERNOTE.IMPORT.API_CALLS_LIMIT of %d has been reached" % EVERNOTE.IMPORT.API_CALLS_LIMIT)
+ return None
+ if not self.getNoteRemoteAPICall():
+ return False
+ # self.tagGuids, self.tagNames = self.evernote.get_tag_names_from_guids(self.whole_note.tagGuids)
+ self.setNoteTags(tag_guids=self.whole_note.tagGuids)
+ self.addNoteFromServerToDB()
+ if not self.keepEvernoteTags:
+ self.tagNames = []
+ self.reportSuccess(EvernoteNotePrototype(whole_note=self.whole_note, tags=self.tagNames))
+ return True
+
    def setNote(self, whole_note):
        """Adopt *whole_note* (server Note object) and persist it to the DB."""
        self.whole_note = whole_note
        self.addNoteFromServerToDB()
+
+ def getNote(self, guid=None):
+ self.__reset_data()
+ if guid:
+ self.result.Note = None
+ self.guid = guid
+ self.evernote.guid = guid
+ self.__update_sequence_number = self.evernote.metadata[
+ self.guid].updateSequenceNum if not self.use_local_db_only else -1
+ if self.getNoteLocal():
+ return True
+ if self.use_local_db_only:
+ return False
+ return self.getNoteRemote()
diff --git a/anknotes/EvernoteNotePrototype.py b/anknotes/EvernoteNotePrototype.py
new file mode 100644
index 0000000..dbb925f
--- /dev/null
+++ b/anknotes/EvernoteNotePrototype.py
@@ -0,0 +1,139 @@
+### Anknotes Shared Imports
+from anknotes.base import is_str_type, decode
+from anknotes.html import generate_evernote_url, generate_evernote_link, generate_evernote_link_by_level
+from anknotes.structs import upperFirst, EvernoteAPIStatus
+from anknotes.logging import log, log_blank, log_error
+
+### Anknotes Class Imports
+from anknotes.EvernoteNoteTitle import EvernoteNoteTitle
+
+
+class EvernoteNotePrototype:
+ ################## CLASS Note ################
+ Title = None
+ """:type: EvernoteNoteTitle"""
+ Content = ""
+ Guid = ""
+ UpdateSequenceNum = -1
+ """:type: int"""
+ TagNames = []
+ TagGuids = []
+ NotebookGuid = None
+ Status = EvernoteAPIStatus.Uninitialized
+ """:type : EvernoteAPIStatus """
+ Children = []
+
+ @property
+ def Tags(self):
+ return self.TagNames
+
+ def process_tags(self):
+ if is_str_type(self.TagNames):
+ self.TagNames = self.TagNames[1:-1].split(',')
+ if is_str_type(self.TagGuids):
+ self.TagGuids = self.TagGuids[1:-1].split(',')
+
+ def __repr__(self):
+        return u"<EvernoteNotePrototype: %s: %s>" % (self.Guid, self.Title)
+
+ def __init__(self, title=None, content=None, guid=None, tags=None, notebookGuid=None, updateSequenceNum=None,
+ whole_note=None, db_note=None):
+ """
+
+ :type whole_note: evernote.edam.type.ttypes.Note
+ :type db_note: sqlite3.dbapi2.Row
+ """
+
+ self.Status = EvernoteAPIStatus.Uninitialized
+ self.TagNames = tags
+ if whole_note is not None:
+ if self.TagNames is None:
+ self.TagNames = whole_note.tagNames
+ self.Title = EvernoteNoteTitle(whole_note)
+ self.Content = whole_note.content
+ self.Guid = whole_note.guid
+ self.NotebookGuid = whole_note.notebookGuid
+ self.UpdateSequenceNum = whole_note.updateSequenceNum
+ self.Status = EvernoteAPIStatus.Success
+ return
+ if db_note is not None:
+ self.Title = EvernoteNoteTitle(db_note)
+ db_note_keys = db_note.keys()
+ for key in ['content', 'guid', 'notebookGuid', 'updateSequenceNum', 'tagNames', 'tagGuids']:
+ if not key in db_note_keys:
+ log_error(
+ "FATAL ERROR: Unable to find key %s in db note %s! \n%s" % (key, self.FullTitle, db_note_keys))
+ log("Values: \n\n" + str({k: db_note[k] for k in db_note_keys}), 'EvernoteNotePrototypeInit')
+ else:
+ setattr(self, upperFirst(key), db_note[key])
+ self.TagNames = decode(self.TagNames)
+ self.Content = decode(self.Content)
+ self.process_tags()
+ self.Status = EvernoteAPIStatus.Success
+ return
+ self.Title = EvernoteNoteTitle(title)
+ self.Content = content
+ self.Guid = guid
+ self.NotebookGuid = notebookGuid
+ self.UpdateSequenceNum = updateSequenceNum
+ self.Status = EvernoteAPIStatus.Manual
+
+ def generateURL(self):
+ return generate_evernote_url(self.Guid)
+
+ def generateLink(self, value=None):
+ return generate_evernote_link(self.Guid, self.Title.Name, value)
+
+ def generateLevelLink(self, value=None):
+ return generate_evernote_link_by_level(self.Guid, self.Title.Name, value)
+
+ ### Shortcuts to EvernoteNoteTitle Properties; Autogenerated with regex /def +(\w+)\(\)\:/def \1\(\):\r\n\treturn self.Title.\1\r\n/
+ @property
+ def Level(self):
+ return self.Title.Level
+
+ @property
+ def Depth(self):
+ return self.Title.Depth
+
+ @property
+ def FullTitle(self): return self.Title.FullTitle
+
+ @property
+ def Name(self):
+ return self.Title.Name
+
+ @property
+ def Root(self):
+ return self.Title.Root
+
+ @property
+ def Base(self):
+ return self.Title.Base
+
+ @property
+ def Parent(self):
+ return self.Title.Parent
+
+ @property
+ def TitleParts(self):
+ return self.Title.TitleParts
+
+ @property
+ def IsChild(self):
+ return self.Title.IsChild
+
+ @property
+ def IsRoot(self):
+ return self.Title.IsRoot
+
+ def IsAboveLevel(self, level_check):
+ return self.Title.IsAboveLevel(level_check)
+
+ def IsBelowLevel(self, level_check):
+ return self.Title.IsBelowLevel(level_check)
+
+ def IsLevel(self, level_check):
+ return self.Title.IsLevel(level_check)
+
+ ################## END CLASS Note ################
diff --git a/anknotes/EvernoteNoteTitle.py b/anknotes/EvernoteNoteTitle.py
new file mode 100644
index 0000000..f221c8d
--- /dev/null
+++ b/anknotes/EvernoteNoteTitle.py
@@ -0,0 +1,234 @@
+# -*- coding: utf-8 -*-
+### Anknotes Shared Imports
+from anknotes.shared import *
+from anknotes.base import is_str_type
+from sys import stderr
+
+
+def generateTOCTitle(title):
+ title = EvernoteNoteTitle.titleObjectToString(title).upper()
+ for chr in u'αβδφḃ':
+ title = title.replace(chr.upper(), chr)
+ return title
+
+
+class EvernoteNoteTitle:
+ level = 0
+ __title = ""
+ """:type: str"""
+ __titleParts = None
+ """:type: list[str]"""
+
+ # # Parent = None
+ # def __str__(self):
+ # return "%d: %s" % (self.Level(), self.Title)
+
+ def __repr__(self):
+ return "<%s:%s>" % (self.__class__.__name__, self.FullTitle)
+
+ @property
+ def TitleParts(self):
+ if not self.FullTitle:
+ return []
+ if not self.__titleParts:
+ self.__titleParts = generateTitleParts(self.FullTitle)
+ return self.__titleParts
+
+ @property
+ def Level(self):
+ """
+ :rtype: int
+ :return: Current Level with 1 being the Root Title
+ """
+ if not self.level:
+ self.level = len(self.TitleParts)
+ return self.level
+
+ @property
+ def Depth(self):
+ return self.Level - 1
+
+ def Parts(self, level=-1):
+ return self.Slice(level)
+
+ def Part(self, level=-1):
+ mySlice = self.Parts(level)
+ if not mySlice:
+ return None
+ return mySlice.Root
+
+ def BaseParts(self, level=None):
+ return self.Slice(1, level)
+
+ def Parents(self, level=-1):
+ # noinspection PyTypeChecker
+ return self.Slice(None, level)
+
+ def Names(self, level=-1):
+ return self.Parts(level)
+
+ @property
+ def TOCTitle(self):
+ return generateTOCTitle(self.FullTitle)
+
+ @property
+ def TOCName(self):
+ return generateTOCTitle(self.Name)
+
+ @property
+ def TOCRootTitle(self):
+ return generateTOCTitle(self.Root)
+
+ @property
+ def Name(self):
+ return self.Part()
+
+ @property
+ def Root(self):
+ return self.Parents(1).FullTitle
+
+ @property
+ def Base(self):
+ return self.BaseParts()
+
+ def Slice(self, start=0, end=None):
+ # print "Slicing: <%s> %s ~ %d,%d" % (type(self.Title), self.Title, start, end)
+ oldParts = self.TitleParts
+ # print "Slicing: %s ~ %d,%d from parts %s" % (self.Title, start, end, str(oldParts))
+ assert self.FullTitle and oldParts
+ if start is None and end is None:
+ print "Slicing: %s ~ %d,%d from parts %s" % (self.FullTitle, start, end, str(oldParts))
+ assert start is not None or end is not None
+ newParts = oldParts[start:end]
+ if not newParts:
+ log_error("Slice failed for %s-%s of %s" % (str(start), str(end), self.FullTitle))
+ # return None
+ assert len(newParts) > 0
+ newStr = ': '.join(newParts)
+ # print "Slice: Just created new title %s from %s" % (newStr , self.Title)
+ return EvernoteNoteTitle(newStr)
+
+ @property
+ def Parent(self):
+ return self.Parents()
+
+ def IsAboveLevel(self, level_check):
+ return self.Level > level_check
+
+ def IsBelowLevel(self, level_check):
+ return self.Level < level_check
+
+ def IsLevel(self, level_check):
+ return self.Level == level_check
+
+ @property
+ def IsChild(self):
+ return self.IsAboveLevel(1)
+
+ @property
+ def IsRoot(self):
+ return self.IsLevel(1)
+
+ @staticmethod
+ def titleObjectToString(title, recursion=0):
+ """
+ :param title: Title in string, unicode, dict, sqlite, TOCKey or NoteTitle formats. Note objects are also parseable
+ :type title: None | str | unicode | dict[str,str] | sqlite.Row | EvernoteNoteTitle
+ :return: string Title
+ :rtype: str
+ """
+ # if recursion == 0:
+ # str_ = str_safe(title)
+ # try: log(u'\n---------------------------------%s' % str_, 'tOTS', timestamp=False)
+ # except Exception: log(u'\n---------------------------------%s' % '[UNABLE TO DISPLAY TITLE]', 'tOTS', timestamp=False)
+ # pass
+
+ if title is None:
+ # log('NoneType', 'tOTS', timestamp=False)
+ return ""
+ if is_str_type(title):
+ # log('str/unicode', 'tOTS', timestamp=False)
+ return title
+ if hasattr(title, 'FullTitle'):
+ # log('FullTitle', 'tOTS', timestamp=False)
+ # noinspection PyCallingNonCallable
+ title = title.FullTitle() if callable(title.FullTitle) else title.FullTitle
+ elif hasattr(title, 'Title'):
+ # log('Title', 'tOTS', timestamp=False)
+ title = title.Title() if callable(title.Title) else title.Title
+ elif hasattr(title, 'title'):
+ # log('title', 'tOTS', timestamp=False)
+ title = title.title() if callable(title.title) else title.title
+ else:
+ try:
+ if hasattr(title, 'keys'):
+ keys = title.keys() if callable(title.keys) else title.keys
+ if 'title' in keys:
+ # log('keys[title]', 'tOTS', timestamp=False)
+ title = title['title']
+ elif 'Title' in keys:
+ # log('keys[Title]', 'tOTS', timestamp=False)
+ title = title['Title']
+ elif not keys:
+ # log('keys[empty dict?]', 'tOTS', timestamp=False)
+ raise
+ else:
+ log('keys[Unknown Attr]: %s' % str(keys), 'tOTS', timestamp=False)
+ return ""
+ elif 'title' in title:
+ # log('[title]', 'tOTS', timestamp=False)
+ title = title['title']
+ elif 'Title' in title:
+ # log('[Title]', 'tOTS', timestamp=False)
+ title = title['Title']
+ elif FIELDS.TITLE in title:
+ # log('[FIELDS.TITLE]', 'tOTS', timestamp=False)
+ title = title[FIELDS.TITLE]
+ else:
+ # log('Nothing Found', 'tOTS', timestamp=False)
+ # log(title)
+ # log(title.keys())
+ return title
+ except Exception:
+ log('except', 'tOTS', timestamp=False)
+ log(title, 'toTS', timestamp=False)
+ raise LookupError
+ recursion += 1
+ # log(u'recursing %d: ' % recursion, 'tOTS', timestamp=False)
+ return EvernoteNoteTitle.titleObjectToString(title, recursion)
+
+ @property
+ def FullTitle(self):
+ """:rtype: str"""
+ return self.__title
+
+ @property
+ def HTML(self):
+ return self.__html
+
+ def __init__(self, titleObj=None):
+ """:type titleObj: str | unicode | sqlite.Row | EvernoteNoteTitle | evernote.edam.type.ttypes.Note | EvernoteNotePrototype.EvernoteNotePrototype """
+ self.__html = self.titleObjectToString(titleObj)
+ self.__title = strip_tags_and_new_lines(self.__html)
+
+
+def generateTitleParts(title):
+ title = EvernoteNoteTitle.titleObjectToString(title)
+ try:
+ strTitle = re.sub(':+', ':', title)
+ except Exception:
+ log('generateTitleParts Unable to re.sub')
+ log(type(title))
+ raise
+ strTitle = strTitle.strip(':')
+ partsText = strTitle.split(':')
+ count = len(partsText)
+ for i in range(1, count + 1):
+ txt = partsText[i - 1]
+ try:
+ txt = txt.strip()
+ except Exception:
+ print_safe(title + ' -- ' + '"' + txt + '"')
+ raise
+ partsText[i - 1] = txt
+ return partsText
diff --git a/anknotes/EvernoteNotes.py b/anknotes/EvernoteNotes.py
new file mode 100644
index 0000000..95a9660
--- /dev/null
+++ b/anknotes/EvernoteNotes.py
@@ -0,0 +1,444 @@
+# -*- coding: utf-8 -*-
+### Python Imports
+from operator import itemgetter
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+
+### Anknotes Main Imports
+from anknotes.base import encode
+from anknotes.shared import *
+from anknotes.EvernoteNoteTitle import *
+from anknotes.EvernoteNotePrototype import EvernoteNotePrototype
+from anknotes.toc import TOCHierarchyClass
+from anknotes.db import ankDB
+from anknotes import stopwatch
+
+### Anknotes Class Imports
+from anknotes.EvernoteNoteTitle import generateTOCTitle
+
+class EvernoteNoteProcessingFlags:
+ delayProcessing = False
+ populateRootTitlesList = True
+ populateRootTitlesDict = True
+ populateExistingRootTitlesList = False
+ populateExistingRootTitlesDict = False
+ populateMissingRootTitlesList = False
+ populateMissingRootTitlesDict = False
+ populateChildRootTitles = False
+ ignoreAutoTOCAsRootTitle = False
+ ignoreOutlineAsRootTitle = False
+
+ def __init__(self, flags=None):
+ if isinstance(flags, bool):
+ if not flags:
+ self.set_default(False)
+ if flags:
+ self.update(flags)
+
+ def set_default(self, flag):
+ self.populateRootTitlesList = flag
+ self.populateRootTitlesDict = flag
+
+ def update(self, flags):
+ for flag_name, flag_value in flags:
+ if hasattr(self, flag_name):
+ setattr(self, flag_name, flag_value)
+
+
+class EvernoteNotesCollection:
+ TitlesList = []
+ TitlesDict = {}
+ NotesDict = {}
+ """:type : dict[str, EvernoteNote.EvernoteNote]"""
+ ChildNotesDict = {}
+ """:type : dict[str, EvernoteNote.EvernoteNote]"""
+ ChildTitlesDict = {}
+
+ def __init__(self):
+ self.TitlesList = []
+ self.TitlesDict = {}
+ self.NotesDict = {}
+ self.ChildNotesDict = {}
+ self.ChildTitlesDict = {}
+
+
+class EvernoteNotes:
+ ################## CLASS Notes ################
+ Notes = {}
+ """:type : dict[str, EvernoteNote.EvernoteNote]"""
+ RootNotes = EvernoteNotesCollection()
+ RootNotesChildren = EvernoteNotesCollection()
+ processingFlags = EvernoteNoteProcessingFlags()
+ baseQuery = "1"
+
+ def __init__(self, delayProcessing=False):
+ self.processingFlags.delayProcessing = delayProcessing
+ self.RootNotes = EvernoteNotesCollection()
+
+ def addNoteSilently(self, enNote):
+ """:type enNote: EvernoteNote.EvernoteNote"""
+ assert enNote
+ self.Notes[enNote.Guid] = enNote
+
+ def addNote(self, enNote):
+ """:type enNote: EvernoteNote.EvernoteNote"""
+ assert enNote
+ self.addNoteSilently(enNote)
+ if self.processingFlags.delayProcessing:
+ return
+ self.processNote(enNote)
+
+ def addDBNote(self, dbNote):
+ """:type dbNote: sqlite.Row"""
+ enNote = EvernoteNotePrototype(db_note=dbNote)
+ if not enNote:
+ log(dbNote)
+ log(dbNote.keys)
+ log(dir(dbNote))
+ assert enNote
+ self.addNote(enNote)
+
+ def addDBNotes(self, dbNotes):
+ """:type dbNotes: list[sqlite.Row]"""
+ for dbNote in dbNotes:
+ self.addDBNote(dbNote)
+
+ def addDbQuery(self, sql_query, order=''):
+ if not sql_query:
+ sql_query = '1'
+ if self.baseQuery and self.baseQuery != '1':
+ if sql_query == '1':
+ sql_query = self.baseQuery
+ else:
+ sql_query = "(%s) AND (%s) " % (self.baseQuery, sql_query)
+ if order:
+ sql_query += ' ORDER BY ' + order
+ dbNotes = ankDB().execute(sql_query)
+ self.addDBNotes(dbNotes)
+
+ @staticmethod
+ def getNoteFromDB(query):
+ """
+
+ :param query:
+ :return:
+ :rtype : sqlite.Row
+ """
+ dbNote = ankDB().first(query)
+ if not dbNote:
+ return None
+ return dbNote
+
+ def getNoteFromDBByGuid(self, guid):
+ sql_query = "guid = '%s' " % guid
+ return self.getNoteFromDB(sql_query)
+
+ def getEnNoteFromDBByGuid(self, guid):
+ return EvernoteNotePrototype(db_note=self.getNoteFromDBByGuid(guid))
+
+ # def addChildNoteHierarchically(self, enChildNotes, enChildNote):
+ # parts = enChildNote.Title.TitleParts
+ # dict_updated = {}
+ # dict_building = {parts[len(parts)-1]: enChildNote}
+ # print_safe(parts)
+ # for i in range(len(parts), 1, -1):
+ # dict_building = {parts[i - 1]: dict_building}
+ # log_dump(dict_building)
+ # enChildNotes.update(dict_building)
+ # log_dump(enChildNotes)
+ # return enChildNotes
+
+ def processNote(self, enNote):
+ """:type enNote: EvernoteNote.EvernoteNote"""
+ db = ankDB()
+ if self.processingFlags.populateRootTitlesList or self.processingFlags.populateRootTitlesDict or self.processingFlags.populateMissingRootTitlesList or self.processingFlags.populateMissingRootTitlesDict:
+ if enNote.IsChild:
+ # log([enNote.Title, enNote.Level, enNote.Title.TitleParts, enNote.IsChild])
+ rootTitle = enNote.Title.Root
+ rootTitleStr = generateTOCTitle(rootTitle)
+ if self.processingFlags.populateMissingRootTitlesList or self.processingFlags.populateMissingRootTitlesDict:
+ if not rootTitleStr in self.RootNotesExisting.TitlesList:
+ if not rootTitleStr in self.RootNotesMissing.TitlesList:
+ self.RootNotesMissing.TitlesList.append(rootTitleStr)
+ self.RootNotesMissing.ChildTitlesDict[rootTitleStr] = {}
+ self.RootNotesMissing.ChildNotesDict[rootTitleStr] = {}
+ if not enNote.Title.Base:
+ log(enNote.Title)
+ log(enNote.Base)
+ assert enNote.Title.Base
+ childBaseTitleStr = enNote.Title.Base.FullTitle
+ if childBaseTitleStr in self.RootNotesMissing.ChildTitlesDict[rootTitleStr]:
+ log_error("Duplicate Child Base Title String. \n%-18s%s\n%-18s%s: %s\n%-18s%s" % (
+ 'Root Note Title: ', rootTitleStr, 'Child Note: ', enNote.Guid, childBaseTitleStr,
+ 'Duplicate Note: ',
+ self.RootNotesMissing.ChildTitlesDict[rootTitleStr][childBaseTitleStr]),
+ crosspost_to_default=False)
+ if not hasattr(self, 'loggedDuplicateChildNotesWarning'):
+ log(
+ " > WARNING: Duplicate Child Notes found when processing Root Notes. See error log for more details")
+ self.loggedDuplicateChildNotesWarning = True
+ self.RootNotesMissing.ChildTitlesDict[rootTitleStr][childBaseTitleStr] = enNote.Guid
+ self.RootNotesMissing.ChildNotesDict[rootTitleStr][enNote.Guid] = enNote
+ if self.processingFlags.populateRootTitlesList or self.processingFlags.populateRootTitlesDict:
+ if not rootTitleStr in self.RootNotes.TitlesList:
+ self.RootNotes.TitlesList.append(rootTitleStr)
+ if self.processingFlags.populateRootTitlesDict:
+ self.RootNotes.TitlesDict[rootTitleStr][enNote.Guid] = enNote.Title.Base
+ self.RootNotes.NotesDict[rootTitleStr][enNote.Guid] = enNote
+ if self.processingFlags.populateChildRootTitles or self.processingFlags.populateExistingRootTitlesList or self.processingFlags.populateExistingRootTitlesDict:
+ if enNote.IsRoot:
+ rootTitle = enNote.Title
+ rootTitleStr = generateTOCTitle(rootTitle)
+ rootGuid = enNote.Guid
+ if self.processingFlags.populateExistingRootTitlesList or self.processingFlags.populateExistingRootTitlesDict or self.processingFlags.populateMissingRootTitlesList:
+ if not rootTitleStr in self.RootNotesExisting.TitlesList:
+ self.RootNotesExisting.TitlesList.append(rootTitleStr)
+ if self.processingFlags.populateChildRootTitles:
+ childNotes = db.execute("title LIKE ? || ':%' ORDER BY title ASC", rootTitleStr)
+ child_count = 0
+ for childDbNote in childNotes:
+ child_count += 1
+ childGuid = childDbNote['guid']
+ childEnNote = EvernoteNotePrototype(db_note=childDbNote)
+ if child_count is 1:
+ self.RootNotesChildren.TitlesDict[rootGuid] = {}
+ self.RootNotesChildren.NotesDict[rootGuid] = {}
+ childBaseTitle = childEnNote.Title.Base
+ self.RootNotesChildren.TitlesDict[rootGuid][childGuid] = childBaseTitle
+ self.RootNotesChildren.NotesDict[rootGuid][childGuid] = childEnNote
+
+ def processNotes(self, populateRootTitlesList=True, populateRootTitlesDict=True):
+ if self.processingFlags.populateRootTitlesList or self.processingFlags.populateRootTitlesDict:
+ self.RootNotes = EvernoteNotesCollection()
+
+ self.processingFlags.populateRootTitlesList = populateRootTitlesList
+ self.processingFlags.populateRootTitlesDict = populateRootTitlesDict
+
+ for guid, enNote in self.Notes:
+ self.processNote(enNote)
+
+ def processAllChildNotes(self):
+ self.processingFlags.populateRootTitlesList = True
+ self.processingFlags.populateRootTitlesDict = True
+ self.processNotes()
+
+ def populateAllRootTitles(self):
+ self.getChildNotes()
+ self.processAllRootTitles()
+
+ def processAllRootTitles(self):
+ count = 0
+ for rootTitle, baseTitles in self.RootNotes.TitlesDict.items():
+ count += 1
+ baseNoteCount = len(baseTitles)
+ query = "UPPER(title) = '%s'" % escape_text_sql(rootTitle).upper()
+ if self.processingFlags.ignoreAutoTOCAsRootTitle:
+ query += " AND tagNames NOT LIKE '%%,%s,%%'" % TAGS.TOC_AUTO
+ if self.processingFlags.ignoreOutlineAsRootTitle:
+ query += " AND tagNames NOT LIKE '%%,%s,%%'" % TAGS.OUTLINE
+ rootNote = self.getNoteFromDB(query)
+ if rootNote:
+ self.RootNotesExisting.TitlesList.append(rootTitle)
+ else:
+ self.RootNotesMissing.TitlesList.append(rootTitle)
+ print_safe(rootNote, ' TOP LEVEL: [%4d::%2d]: [%7s] ' % (count, baseNoteCount, 'is_toc_outline_str'))
+ # for baseGuid, baseTitle in baseTitles:
+ # pass
+
+ def getChildNotes(self):
+ self.addDbQuery("title LIKE '%%:%%'", 'title ASC')
+
+ def getRootNotes(self):
+ query = "title NOT LIKE '%%:%%'"
+ if self.processingFlags.ignoreAutoTOCAsRootTitle:
+ query += " AND tagNames NOT LIKE '%%,%s,%%'" % TAGS.TOC_AUTO
+ if self.processingFlags.ignoreOutlineAsRootTitle:
+ query += " AND tagNames NOT LIKE '%%,%s,%%'" % TAGS.OUTLINE
+ self.addDbQuery(query, 'title ASC')
+
+ def populateAllPotentialRootNotes(self):
+ self.RootNotesMissing = EvernoteNotesCollection()
+ processingFlags = EvernoteNoteProcessingFlags(False)
+ processingFlags.populateMissingRootTitlesList = True
+ processingFlags.populateMissingRootTitlesDict = True
+ self.processingFlags = processingFlags
+
+ log_banner(" CHECKING FOR ALL POTENTIAL ROOT TITLES ", 'RootTitles\\TOC', clear=True, timestamp=False)
+ log_banner(" CHECKING FOR ISOLATED ROOT TITLES ", 'RootTitles\\Isolated', clear=True, timestamp=False)
+ self.getChildNotes()
+ log("Total %d Missing Root Titles" % len(self.RootNotesMissing.TitlesList), 'RootTitles\\TOC',
+ timestamp=False)
+ self.RootNotesMissing.TitlesList = sorted(self.RootNotesMissing.TitlesList, key=lambda s: s.lower())
+
+ return self.processAllRootNotesMissing()
+
+ def populateAllNonCustomRootNotes(self):
+ return self.populateAllRootNotesMissing(True, True)
+
+ def populateAllRootNotesMissing(self, ignoreAutoTOCAsRootTitle=False, ignoreOutlineAsRootTitle=False):
+ processingFlags = EvernoteNoteProcessingFlags(False)
+ processingFlags.populateMissingRootTitlesList = True
+ processingFlags.populateMissingRootTitlesDict = True
+ processingFlags.populateExistingRootTitlesList = True
+ processingFlags.populateExistingRootTitlesDict = True
+ processingFlags.ignoreAutoTOCAsRootTitle = ignoreAutoTOCAsRootTitle
+ processingFlags.ignoreOutlineAsRootTitle = ignoreOutlineAsRootTitle
+ self.processingFlags = processingFlags
+ self.RootNotesExisting = EvernoteNotesCollection()
+ self.RootNotesMissing = EvernoteNotesCollection()
+ # log(', '.join(self.RootNotesMissing.TitlesList))
+ self.getRootNotes()
+
+ log_banner(" CHECKING FOR MISSING ROOT TITLES ", 'RootTitles\\Missing', clear=True, timestamp=False)
+ log_banner(" CHECKING FOR ISOLATED ROOT TITLES ", 'RootTitles\\Isolated', clear=True, timestamp=False)
+ log("Total %d Existing Root Titles" % len(self.RootNotesExisting.TitlesList), 'RootTitles\\Missing',
+ timestamp=False)
+ self.getChildNotes()
+ log("Total %d Missing Root Titles" % len(self.RootNotesMissing.TitlesList), 'RootTitles\\Missing',
+ timestamp=False)
+ self.RootNotesMissing.TitlesList = sorted(self.RootNotesMissing.TitlesList, key=lambda s: s.lower())
+
+ return self.processAllRootNotesMissing()
+
+ def processAllRootNotesMissing(self):
+ """:rtype : list[EvernoteTOCEntry]"""
+ DEBUG_HTML = False
+ # log (" CREATING TOC's " , 'tocList', clear=True, timestamp=False)
+ # log ("------------------------------------------------" , 'tocList', timestamp=False)
+    # if DEBUG_HTML: log('<h1>CREATING TOCs</h1>', 'extra\\logs\\toc-ols\\toc-index.htm', timestamp=False, clear=True, extension='htm')
+ ols = []
+ dbRows = []
+ returns = []
+ """:type : list[EvernoteTOCEntry]"""
+ db = ankDB(TABLES.TOC_AUTO)
+ db.delete("1", table=db.table)
+ db.commit()
+ # olsz = None
+ tmr = stopwatch.Timer(self.RootNotesMissing.TitlesList, infoStr='Processing Root Notes', label='RootTitles\\')
+ for rootTitleStr in self.RootNotesMissing.TitlesList:
+ count_child = 0
+ childTitlesDictSortedKeys = sorted(self.RootNotesMissing.ChildTitlesDict[rootTitleStr],
+ key=lambda s: s.lower())
+ total_child = len(childTitlesDictSortedKeys)
+ tags = []
+ outline = self.getNoteFromDB("UPPER(title) = '%s' AND tagNames LIKE '%%,%s,%%'" % (
+ escape_text_sql(rootTitleStr.upper()), TAGS.OUTLINE))
+ currentAutoNote = self.getNoteFromDB("UPPER(title) = '%s' AND tagNames LIKE '%%,%s,%%'" % (
+ escape_text_sql(rootTitleStr.upper()), TAGS.TOC_AUTO))
+ notebookGuids = {}
+ childGuid = None
+ is_isolated = total_child is 1 and not outline
+ if is_isolated:
+ tmr.counts.isolated.step()
+ childBaseTitle = childTitlesDictSortedKeys[0]
+ childGuid = self.RootNotesMissing.ChildTitlesDict[rootTitleStr][childBaseTitle]
+ enChildNote = self.RootNotesMissing.ChildNotesDict[rootTitleStr][childGuid]
+ # tags = enChildNote.Tags
+ log(" > ISOLATED ROOT TITLE: [%-3d]: %-60s --> %-40s: %s" % (
+ tmr.counts.isolated.val, rootTitleStr + ':', childBaseTitle, childGuid), tmr.label + 'Isolated',
+ timestamp=False)
+ else:
+ tmr.counts.created.completed.step()
+ log_blank(tmr.label + 'TOC')
+ log(" [%-3d] %s %s" % (tmr.count, rootTitleStr, '(O)' if outline else ' '), tmr.label + 'TOC',
+ timestamp=False)
+
+ tmr.step(rootTitleStr)
+
+ if is_isolated:
+ continue
+
+ tocHierarchy = TOCHierarchyClass(rootTitleStr)
+ if outline:
+ tocHierarchy.Outline = TOCHierarchyClass(note=outline)
+ tocHierarchy.Outline.parent = tocHierarchy
+
+ for childBaseTitle in childTitlesDictSortedKeys:
+ count_child += 1
+ childGuid = self.RootNotesMissing.ChildTitlesDict[rootTitleStr][childBaseTitle]
+ enChildNote = self.RootNotesMissing.ChildNotesDict[rootTitleStr][childGuid]
+ if count_child == 1:
+ tags = enChildNote.Tags
+ else:
+ tags = [x for x in tags if x in enChildNote.Tags]
+ if not enChildNote.NotebookGuid in notebookGuids:
+ notebookGuids[enChildNote.NotebookGuid] = 0
+ notebookGuids[enChildNote.NotebookGuid] += 1
+ level = enChildNote.Title.Level
+ # childName = enChildNote.Title.Name
+ # childTitle = enChildNote.FullTitle
+ log(" %2d: %d. --> %-60s" % (count_child, level, childBaseTitle),
+ tmr.label + 'TOC', timestamp=False)
+ # tocList.generateEntry(childTitle, enChildNote)
+ tocHierarchy.addNote(enChildNote)
+ realTitle = get_evernote_title_from_guid(childGuid)
+ realTitle = realTitle[0:realTitle.index(':')]
+ # realTitleUTF8 = realTitle.encode('utf8')
+ notebookGuid = sorted(notebookGuids.items(), key=itemgetter(1), reverse=True)[0][0]
+
+ real_root_title = generateTOCTitle(realTitle)
+
+ ol = tocHierarchy.GetOrderedList()
+ tocEntry = EvernoteTOCEntry(real_root_title, ol, ',' + ','.join(tags) + ',', notebookGuid)
+ returns.append(tocEntry)
+ dbRows.append(tocEntry.items())
+
+ if not DEBUG_HTML:
+ continue
+
+ # ols.append(ol)
+ # olutf8 = encode(ol)
+ # fn = 'toc-ols\\toc-' + str(tmr.count) + '-' + rootTitleStr.replace('\\', '_') + '.htm'
+ # full_path = os.path.join(FOLDERS.LOGS, fn)
+ # if not os.path.exists(os.path.dirname(full_path)):
+ # os.mkdir(os.path.dirname(full_path))
+ # file_object = open(full_path, 'w')
+ # file_object.write(olutf8)
+ # file_object.close()
+
+ # if DEBUG_HTML: log(ol, 'toc-ols\\toc-' + str(count) + '-' + rootTitleStr.replace('\\', '_'), timestamp=False, clear=True, extension='htm')
+ # log("Created TOC #%d:\n%s\n\n" % (count, str_), 'tocList', timestamp=False)
+ if DEBUG_HTML:
+            ols_html = u'\r\n<hr/>\r\n'.join(ols)
+ fn = 'toc-ols\\toc-index.htm'
+ file_object = open(os.path.join(FOLDERS.LOGS, fn), 'w')
+ try:
+                file_object.write(u'<h1>CREATING TOCs</h1>\n\n' + ols_html)
+ except Exception:
+ try:
+                    file_object.write(u'<h1>CREATING TOCs</h1>\n\n' + encode(ols_html))
+ except Exception:
+ pass
+
+ file_object.close()
+
+ db.executemany("INSERT INTO {t} (root_title, contents, tagNames, notebookGuid) VALUES(?, ?, ?, ?)", dbRows)
+ db.commit()
+
+ return returns
+
+ def populateAllRootNotesWithoutTOCOrOutlineDesignation(self):
+ processingFlags = EvernoteNoteProcessingFlags()
+ processingFlags.populateRootTitlesList = False
+ processingFlags.populateRootTitlesDict = False
+ processingFlags.populateChildRootTitles = True
+ self.processingFlags = processingFlags
+ self.getRootNotes()
+ self.processAllRootNotesWithoutTOCOrOutlineDesignation()
+
+ def processAllRootNotesWithoutTOCOrOutlineDesignation(self):
+ count = 0
+ for rootGuid, childBaseTitleDicts in self.RootNotesChildren.TitlesDict.items():
+ rootEnNote = self.Notes[rootGuid]
+ if len(childBaseTitleDicts.items()) > 0:
+ is_toc = TAGS.TOC in rootEnNote.Tags
+ is_outline = TAGS.OUTLINE in rootEnNote.Tags
+ is_both = is_toc and is_outline
+ is_none = not is_toc and not is_outline
+ is_toc_outline_str = "BOTH ???" if is_both else "TOC" if is_toc else "OUTLINE" if is_outline else "N/A"
+ if is_none:
+ count += 1
+ print_safe(rootEnNote, ' TOP LEVEL: [%3d] %-8s: ' % (count, is_toc_outline_str))
diff --git a/anknotes/README.md b/anknotes/README.md
deleted file mode 100644
index 0204cc8..0000000
--- a/anknotes/README.md
+++ /dev/null
@@ -1,23 +0,0 @@
-# Evernote2Anki Importer (beta)
-**Forks and suggestions are very welcome.**
-
-## Description
-An Anki plug-in aiming for syncing evernote account with anki directly from anki. It aims to replace a Java standalone application [available here] (https://code.google.com/p/evernote2anki/)
-Very rudimentary for the moment. I wait for suggestions according to the needs of evernote/anki users.
-
-## Users : How to use it
-- download everything, move it to your Anki/addons directory
-- start Anki, fill in the Infromation in the prefrences tap and then press Import from Evernote
--When you run it the first Time a browser tab will open on the evernote site asking you for access to your account
-- when you click ok you are taken to a website where the oauth verification key is displayed you paste that key into the open anki windows and click ok with that you are set.
-
-## Features and further development
-####Current feature :
-- Import all the notes from evernote with selected tags
-- Possibility to choose the name of the deck, as well as the default tag in anki (but should not be changed)
-- Does not import twice a card (only new cards are imported)
-- - A window allowing the user to change the options (instead of manual edit of options.cfg)
-
-####Desirable new features (?) :
-
-- Updating anki cards accordingly the edit of evernote notes.
diff --git a/anknotes/___sqlite3.py b/anknotes/___sqlite3.py
new file mode 100644
index 0000000..c60baed
--- /dev/null
+++ b/anknotes/___sqlite3.py
@@ -0,0 +1,190 @@
+"""Skeleton for 'sqlite3' stdlib module."""
+
+
+import sqlite3
+
+
+def connect(database, timeout=5.0, detect_types=0, isolation_level=None,
+ check_same_thread=False, factory=None, cached_statements=100):
+ """Opens a connection to the SQLite database file database.
+
+ :type database: bytes | unicode
+ :type timeout: float
+ :type detect_types: int
+ :type isolation_level: string | None
+ :type check_same_thread: bool
+ :type factory: (() -> sqlite3.Connection) | None
+ :rtype: sqlite3.Connection
+ """
+ return sqlite3.Connection()
+
+
+def register_converter(typename, callable):
+ """Registers a callable to convert a bytestring from the database into a
+ custom Python type.
+
+ :type typename: string
+ :type callable: (bytes) -> unknown
+ :rtype: None
+ """
+ pass
+
+
+def register_adapter(type, callable):
+ """Registers a callable to convert the custom Python type type into one of
+ SQLite's supported types.
+
+ :type type: type
+ :type callable: (unknown) -> unknown
+ :rtype: None
+ """
+ pass
+
+
+def complete_statement(sql):
+ """Returns True if the string sql contains one or more complete SQL
+ statements terminated by semicolons.
+
+ :type sql: string
+ :rtype: bool
+ """
+ return False
+
+
+def enable_callback_tracebacks(flag):
+ """By default you will not get any tracebacks in user-defined functions,
+ aggregates, converters, authorizer callbacks etc.
+
+ :type flag: bool
+ :rtype: None
+ """
+ pass
+
+
+class Connection(object):
+ """A SQLite database connection."""
+
+ def cursor(self, cursorClass=None):
+ """
+ :type cursorClass: type | None
+ :rtype: sqlite3.Cursor
+ """
+ return sqlite3.Cursor()
+
+ def execute(self, sql, parameters=()):
+ """This is a nonstandard shortcut that creates an intermediate cursor
+ object by calling the cursor method, then calls the cursor's execute
+ method with the parameters given.
+
+ :type sql: string
+ :type parameters: collections.Iterable
+ :rtype: sqlite3.Cursor
+ """
+ pass
+
+ def executemany(self, sql, seq_of_parameters=()):
+ """This is a nonstandard shortcut that creates an intermediate cursor
+ object by calling the cursor method, then calls the cursor's
+ executemany method with the parameters given.
+
+ :type sql: string
+ :type seq_of_parameters: collections.Iterable[collections.Iterable]
+ :rtype: sqlite3.Cursor
+ """
+ pass
+
+ def executescript(self, sql_script):
+ """This is a nonstandard shortcut that creates an intermediate cursor
+ object by calling the cursor method, then calls the cursor's
+ executescript method with the parameters given.
+
+ :type sql_script: bytes | unicode
+ :rtype: sqlite3.Cursor
+ """
+ pass
+
+ def create_function(self, name, num_params, func):
+ """Creates a user-defined function that you can later use from within
+ SQL statements under the function name name.
+
+ :type name: string
+ :type num_params: int
+ :type func: collections.Callable
+ :rtype: None
+ """
+ pass
+
+
+ def create_aggregate(self, name, num_params, aggregate_class):
+ """Creates a user-defined aggregate function.
+
+ :type name: string
+ :type num_params: int
+ :type aggregate_class: type
+ :rtype: None
+ """
+ pass
+
+ def create_collation(self, name, callable):
+ """Creates a collation with the specified name and callable.
+
+ :type name: string
+ :type callable: collections.Callable
+ :rtype: None
+ """
+ pass
+
+
+class Cursor(object):
+ """A SQLite database cursor."""
+
+ def execute(self, sql, parameters=()):
+ """Executes an SQL statement.
+
+ :type sql: string
+ :type parameters: collections.Iterable
+ :rtype: sqlite3.Cursor
+ """
+ pass
+
+ def executemany(self, sql, seq_of_parameters=()):
+ """Executes an SQL command against all parameter sequences or mappings
+ found in the sequence.
+
+ :type sql: string
+ :type seq_of_parameters: collections.Iterable[collections.Iterable]
+ :rtype: sqlite3.Cursor
+ """
+ pass
+
+ def executescript(self, sql_script):
+ """This is a nonstandard convenience method for executing multiple SQL
+ statements at once.
+
+ :type sql_script: bytes | unicode
+ :rtype: sqlite3.Cursor
+ """
+ pass
+
+ def fetchone(self):
+ """Fetches the next row of a query result set, returning a single
+ sequence, or None when no more data is available.
+
+ :rtype: tuple | None
+ """
+ pass
+
+ def fetchmany(self, size=-1):
+ """Fetches the next set of rows of a query result, returning a list.
+
+ :type size: numbers.Integral
+ :rtype: list[tuple]
+ """
+ return []
+
+ def fetchall(self):
+ """Fetches all (remaining) rows of a query result, returning a list.
+
+ :rtype: list[tuple]
+ """
+ return []
diff --git a/anknotes/__main__.py b/anknotes/__main__.py
index f594b40..4aebb5a 100644
--- a/anknotes/__main__.py
+++ b/anknotes/__main__.py
@@ -1,393 +1,373 @@
+# -*- coding: utf-8 -*-
+### Python Imports
import os
-
-# from thrift.Thrift import *
-from evernote.edam.notestore.ttypes import NoteFilter, NotesMetadataResultSpec
-from evernote.edam.error.ttypes import EDAMSystemException, EDAMErrorCode
-from evernote.api.client import EvernoteClient
-# from evernote.edam.type.ttypes import SavedSearch
-
-import anki
-import aqt
-from anki.hooks import wrap
+import re, sre_constants
+import sys
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+ is_pysqlite = True
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+ is_pysqlite = False
+
+
+### Anknotes Shared Imports
+from anknotes.imports import in_anki
+from anknotes.shared import *
+from anknotes import stopwatch
+
+### Anknotes Main Imports
+from anknotes import menu, settings
+
+### Anki Imports
+if ANKNOTES.HOOKS.SEARCH:
+ from anki.find import Finder
+ from aqt import browser
+if ANKNOTES.HOOKS.DB:
+ from anki.db import DB
+from anki.hooks import wrap, addHook
from aqt.preferences import Preferences
-from aqt.utils import showInfo, getText, openLink, getOnlyText
-from aqt.qt import QLineEdit, QLabel, QVBoxLayout, QGroupBox, SIGNAL, QCheckBox, QComboBox, QSpacerItem, QSizePolicy, QWidget
from aqt import mw
-# from pprint import pprint
-
-
-# Note: This class was adapted from the Real-Time_Import_for_use_with_the_Rikaisama_Firefox_Extension plug-in
-# by cb4960@gmail.com
-# .. itself adapted from Yomichan plugin by Alex Yatskov.
-
-PATH = os.path.dirname(os.path.abspath(__file__))
-EVERNOTE_MODEL = 'evernote_note'
-EVERNOTE_TEMPLATE_NAME = 'EvernoteReview'
-TITLE_FIELD_NAME = 'title'
-CONTENT_FIELD_NAME = 'content'
-GUID_FIELD_NAME = 'Evernote GUID'
-
-SETTING_UPDATE_EXISTING_NOTES = 'evernoteUpdateExistingNotes'
-SETTING_TOKEN = 'evernoteToken'
-SETTING_KEEP_TAGS = 'evernoteKeepTags'
-SETTING_TAGS_TO_IMPORT = 'evernoteTagsToImport'
-SETTING_DEFAULT_TAG = 'evernoteDefaultTag'
-SETTING_DEFAULT_DECK = 'evernoteDefaultDeck'
-
-class UpdateExistingNotes:
- IgnoreExistingNotes, UpdateNotesInPlace, DeleteAndReAddNotes = range(3)
-
-class Anki:
- def update_evernote_cards(self, evernote_cards, tag):
- return self.add_evernote_cards(evernote_cards, None, tag, True)
-
- def add_evernote_cards(self, evernote_cards, deck, tag, update=False):
- count = 0
- model_name = EVERNOTE_MODEL
- for card in evernote_cards:
- anki_field_info = {TITLE_FIELD_NAME: card.front.decode('utf-8'),
- CONTENT_FIELD_NAME: card.back.decode('utf-8'),
- GUID_FIELD_NAME: card.guid}
- card.tags.append(tag)
- if update:
- self.update_note(anki_field_info, card.tags)
- else:
- self.add_note(deck, model_name, anki_field_info, card.tags)
- count += 1
- return count
-
- def delete_anki_cards(self, guid_ids):
- col = self.collection()
- card_ids = []
- for guid in guid_ids:
- card_ids += mw.col.findCards(guid)
- col.remCards(card_ids)
- return len(card_ids)
-
- def update_note(self, fields, tags=list()):
- col = self.collection()
- note_id = col.findNotes(fields[GUID_FIELD_NAME])[0]
- note = anki.notes.Note(col, None, note_id)
- note.tags = tags
- for fld in note._model['flds']:
- if TITLE_FIELD_NAME in fld.get('name'):
- note.fields[fld.get('ord')] = fields[TITLE_FIELD_NAME]
- elif CONTENT_FIELD_NAME in fld.get('name'):
- note.fields[fld.get('ord')] = fields[CONTENT_FIELD_NAME]
- # we dont have to update the evernote guid because if it changes we wont find this note anyway
- note.flush()
- return note.id
-
- def add_note(self, deck_name, model_name, fields, tags=list()):
- note = self.create_note(deck_name, model_name, fields, tags)
- if note is not None:
- collection = self.collection()
- collection.addNote(note)
- collection.autosave()
- self.start_editing()
- return note.id
-
- def create_note(self, deck_name, model_name, fields, tags=list()):
- id_deck = self.decks().id(deck_name)
- model = self.models().byName(model_name)
- col = self.collection()
- note = anki.notes.Note(col, model)
- note.model()['did'] = id_deck
- note.tags = tags
- for name, value in fields.items():
- note[name] = value
- return note
-
- def add_evernote_model(self): # adapted from the IREAD plug-in from Frank
- col = self.collection()
- mm = col.models
- evernote_model = mm.byName(EVERNOTE_MODEL)
- if evernote_model is None:
- evernote_model = mm.new(EVERNOTE_MODEL)
- # Field for title:
- model_field = mm.newField(TITLE_FIELD_NAME)
- mm.addField(evernote_model, model_field)
- # Field for text:
- text_field = mm.newField(CONTENT_FIELD_NAME)
- mm.addField(evernote_model, text_field)
- # Field for source:
- guid_field = mm.newField(GUID_FIELD_NAME)
- guid_field['sticky'] = True
- mm.addField(evernote_model, guid_field)
- # Add template
- t = mm.newTemplate(EVERNOTE_TEMPLATE_NAME)
- t['qfmt'] = "{{" + TITLE_FIELD_NAME + "}}"
- t['afmt'] = "{{" + CONTENT_FIELD_NAME + "}}"
- mm.addTemplate(evernote_model, t)
- mm.add(evernote_model)
- return evernote_model
+from aqt.qt import Qt, QIcon, QTreeWidget, QTreeWidgetItem, QDesktopServices, QUrl
+from anki.utils import ids2str, splitFields
+
def import_timer_toggle():
    """Trigger or schedule an automatic Evernote import after profile load.

    Runs only when the user enabled "Enable Auto Import On Profile Load".
    If at least the minimum interval (>= 20 min) has passed since the last
    import, the import starts immediately; otherwise a timer is created for
    the remaining delay.

    :return: result of ``menu.import_from_evernote()``, the created timer,
        or None when auto-import is disabled.
    """
    title = "&Enable Auto Import On Profile Load"
    doAutoImport = mw.col.conf.get(
        SETTINGS.ANKNOTES_CHECKABLE_MENU_ITEMS_PREFIX + '_' + title.replace(' ', '_').replace('&', ''), False)
    if not doAutoImport:
        return
    lastImport = SETTINGS.EVERNOTE.LAST_IMPORT.fetch()
    importDelay = 0
    if lastImport:
        td = (datetime.now() - datetime.strptime(lastImport, ANKNOTES.DATE_FORMAT))
        # Enforce at least 20 minutes between automatic imports.
        minimum = timedelta(seconds=max(EVERNOTE.IMPORT.INTERVAL, 20 * 60))
        if td < minimum:
            importDelay = (minimum - td).total_seconds()
    # FIX: original used ``importDelay is 0`` — identity comparison with an int
    # literal is implementation-defined and is always False for the float that
    # total_seconds() returns, so the scheduled path could never be skipped
    # reliably. Use equality instead.
    if importDelay == 0:
        return menu.import_from_evernote()
    m, s = divmod(importDelay, 60)
    log("> Starting Auto Import, Triggered by Profile Load, in %d:%02d min" % (m, s))
    return create_timer(importDelay, menu.import_from_evernote)
+
+def _findEdited((val, args)):
+ try:
+ days = int(val)
+ except ValueError:
+ return None
+ return "c.mod > %d" % (time.time() - days * 86400)
+
+
def _findAnknotes(val_args):
    """Custom Anki search operator ``anknotes:<category>``.

    Resolves a category keyword (all / sub / root / child / orphan /
    hierarchical, plus their ``_alt`` variants) to a cached list of note ids
    and returns an SQL predicate restricting the card search to those notes.

    :param val_args: ``(val, args)`` pair supplied by Anki's Finder; ``val`` is
        the category keyword, ``args`` is unused here.
    :return: SQL predicate string, or None for an unknown category.
    """
    # FIX (PEP 3113): original used Python-2-only tuple parameters; manual
    # unpacking keeps the one-positional-tuple signature intact.
    val, args = val_args
    tmr = stopwatch.Timer(label='finder\\findAnknotes', begin=False)
    log_banner("FINDANKNOTES SEARCH: " + val.upper().replace('_', ' '), tmr.label, append_newline=False, clear=False)
    if not hasattr(_findAnknotes, 'note_ids'):
        _findAnknotes.note_ids = {}
    # FIX: original condition was ``a or b and c`` which Python parses as
    # ``a or (b and c)``, so the expensive 'hierarchical' recompute ran on
    # every call even when cached with ANKNOTES.CACHE_SEARCHES enabled.
    # The cache check must apply to both keywords.
    if val in ('hierarchical', 'hierarchical_alt') and (
            val not in _findAnknotes.note_ids or not ANKNOTES.CACHE_SEARCHES):
        tmr.reset()
        # A hierarchical result is simply root notes + child notes.
        val_root = val.replace('hierarchical', 'root')
        val_child = val.replace('hierarchical', 'child')
        _findAnknotes((val_root, None))
        _findAnknotes((val_child, None))
        _findAnknotes.note_ids[val] = _findAnknotes.note_ids[val_root] + _findAnknotes.note_ids[val_child]
        write_file_contents(" > %s Search Complete: ".ljust(25) % val.upper().replace('_', ' ') + "%-5s --> %3d results" % (
            tmr.str_long, len(_findAnknotes.note_ids[val])), tmr.label)

    if not hasattr(_findAnknotes, 'queries'):
        # Lazily-built map of category -> SQL predicate fragment.
        _findAnknotes.queries = {
            'all': get_evernote_model_ids(True),
            'sub': 'n.sfld like "%:%"',
            'root_alt': "n.sfld NOT LIKE '%:%' AND ank.title LIKE n.sfld || ':%'",
            'child_alt': "n.sfld LIKE '%%:%%' AND UPPER(SUBSTR(n.sfld, 0, INSTR(n.sfld, ':'))) IN (SELECT UPPER(title) FROM %s WHERE title NOT LIKE '%%:%%' AND tagNames LIKE '%%,%s,%%') " % (
                TABLES.EVERNOTE.NOTES, TAGS.TOC),
            'orphan_alt': "n.sfld LIKE '%%:%%' AND UPPER(SUBSTR(n.sfld, 0, INSTR(n.sfld, ':'))) NOT IN (SELECT UPPER(title) FROM %s WHERE title NOT LIKE '%%:%%' AND tagNames LIKE '%%,%s,%%') " % (
                TABLES.EVERNOTE.NOTES, TAGS.TOC)
        }

    if val not in _findAnknotes.note_ids or (not ANKNOTES.CACHE_SEARCHES and 'hierarchical' not in val):
        tmr.reset()
        if val == 'root':
            _findAnknotes.note_ids[val] = get_anknotes_root_notes_nids()
        elif val == 'child':
            _findAnknotes.note_ids[val] = get_anknotes_child_notes_nids()
        elif val == 'orphan':
            _findAnknotes.note_ids[val] = get_anknotes_orphan_notes_nids()
        elif val in _findAnknotes.queries:
            pred = _findAnknotes.queries[val]
            col = 'n.id'
            table = 'notes n'
            if 'ank.' in pred:
                # Predicates referencing the Evernote table need the join and DISTINCT.
                col = 'DISTINCT ' + col
                table += ', %s ank' % TABLES.EVERNOTE.NOTES
            sql = 'select %s from %s where ' % (col, table) + pred
            _findAnknotes.note_ids[val] = ankDB().list(sql)
        else:
            # Unknown category: tell the Finder the search term is invalid.
            return None
        write_file_contents(" > Cached %s Note IDs: ".ljust(25) % val + "%-5s --> %3d results" % (
            tmr.str_long, len(_findAnknotes.note_ids[val])), tmr.label)
    else:
        write_file_contents(" > Retrieving %3d %s Note IDs from Cache" % (len(_findAnknotes.note_ids[val]), val), tmr.label)
    log_blank(tmr.label)
    return "c.nid IN %s" % ids2str(_findAnknotes.note_ids[val])
+
+
class CallbackItem(QTreeWidgetItem):
    """Sidebar tree item that remembers callbacks to run on click/collapse."""

    def __init__(self, root, name, onclick, oncollapse=None):
        # Single-column item displaying ``name`` under ``root``.
        super(CallbackItem, self).__init__(root, [name])
        self.onclick = onclick
        self.oncollapse = oncollapse
+
+
def anknotes_browser_get_icon(icon=None):
    """Return a QIcon for a sidebar entry.

    With an ``icon`` name, loads it from the Anki resource bundle; otherwise
    returns the Evernote web icon, imported lazily and cached on the function
    object so the graphics module is only loaded once.
    """
    if icon:
        return QIcon(":/icons/" + icon)
    try:
        return anknotes_browser_get_icon.default_icon
    except AttributeError:
        from anknotes.graphics import icoEvernoteWeb
        anknotes_browser_get_icon.default_icon = icoEvernoteWeb
        return anknotes_browser_get_icon.default_icon
+
+
def anknotes_browser_add_treeitem(self, tree, name, cmd, icon=None, index=None, root=None):
    """Add one clickable entry to the browser's sidebar tree.

    With a truthy ``index``, inserts a plain top-level item at that position on
    ``root``; otherwise appends a CallbackItem under ``tree``. Either way the
    item's click handler sets the browser filter to ``cmd``.

    :return: (root, tree) so callers can keep threading both through.
    """
    if root is None:
        root = tree
    # Bind ``cmd`` now via the default-argument trick so each entry
    # filters on its own command string.
    def onclick(c=cmd):
        return self.setFilter(c)
    if index:
        top_item = QTreeWidgetItem([_(name)])
        top_item.onclick = onclick
        top_item.setIcon(0, anknotes_browser_get_icon(icon))
        root.insertTopLevelItem(index, top_item)
    else:
        leaf = self.CallbackItem(tree, _(name), onclick)
        leaf.setIcon(0, anknotes_browser_get_icon(icon))
    return root, tree
+
+
def anknotes_browser_add_tree(self, tree, items, root=None, name=None, icon=None):
    """Recursively attach a nested list spec to the browser sidebar.

    Each entry is either ``[label, filter_string, ...]`` (a leaf, forwarded to
    anknotes_browser_add_treeitem) or ``[label, [children]]`` (a branch, which
    becomes an expanded CallbackItem whose children are added recursively).

    :return: the (possibly updated) root item.
    """
    if root is None:
        root = tree
    for entry in items:
        if isinstance(entry[1], list):
            # Branch: create an expanded container node, then recurse into it.
            branch_name = entry[0]
            branch = self.CallbackItem(tree, _(branch_name), None)
            branch.setExpanded(True)
            branch.setIcon(0, anknotes_browser_get_icon(icon))
            root = anknotes_browser_add_tree(self, branch, entry[1], root, branch_name, icon)
        else:
            # Leaf: entry unpacks to (name, cmd[, icon[, index]]).
            root, tree = anknotes_browser_add_treeitem(self, tree, *entry, root=root)
    return root
+
+
def anknotes_browser_tagtree_wrap(self, root, _old):
    """Wrapper ('around') for ``browser.Browser._systemTagTree``.

    Builds the stock sidebar via ``_old`` and then appends anknotes' custom
    saved searches (an "Edited This Week" entry plus an "Anknotes" subtree).

    :param root: the sidebar tree being built
    :type root : QTreeWidget
    :param _old: the original _systemTagTree implementation
    :return: the value returned by anknotes_browser_add_tree
    """
    root = _old(self, root)
    # Insert just after the stock "Added Today" item when it exists,
    # otherwise fall back to position 3.
    indices = root.findItems(_("Added Today"), Qt.MatchFixedString)
    index = (root.indexOfTopLevelItem(indices[0]) + 1) if indices else 3
    # Nested [label, filter] / [label, [children]] spec consumed by
    # anknotes_browser_add_tree; the filter strings are handled by the
    # custom search operators registered in anknotes_search_hook.
    tags = \
        [
            ["Edited This Week", "edited:7", "view-pim-calendar.png", index],
            ["Anknotes",
             [
                 ["All Anknotes", "anknotes:all"],
                 ["Hierarchy",
                  [
                      ["All Hierarchical Notes", "anknotes:hierarchical"],
                      ["Root Notes", "anknotes:root"],
                      ["Sub Notes", "anknotes:sub"],
                      ["Child Notes", "anknotes:child"],
                      ["Orphan Notes", "anknotes:orphan"]
                  ]
                  ],
                 # ["Hierarchy: Alt",
                 # [
                 # ["All Hierarchical Notes", "anknotes:hierarchical_alt"],
                 # ["Root Notes", "anknotes:root_alt"],
                 # ["Child Notes", "anknotes:child_alt"],
                 # ["Orphan Notes", "anknotes:orphan_alt"]
                 # ]
                 # ],
                 ["Front Cards", "card:1"]
             ]
             ]
        ]

    return anknotes_browser_add_tree(self, root, tags)
+
+
def anknotes_finder_findCards_wrap(self, query, order=False, _old=None):
    """Replacement ('around' wrap) for ``Finder.findCards`` with logging.

    Reimplements the stock search pipeline (tokenize -> where -> order ->
    query -> db.list) while logging each stage to the finder log. ``_old`` is
    intentionally never invoked — this is a full reimplementation.

    :return: list of card ids matching ``query`` (possibly reversed), or []
        on an invalid search / SQL error.
    """
    tmr = stopwatch.Timer(label='finder\\findCards')
    log_banner("FINDCARDS SEARCH: " + query, tmr.label, append_newline=False, clear=False)
    tokens = self._tokenize(query)
    preds, args = self._where(tokens)
    write_file_contents('Tokens: '.ljust(25) + ', '.join(tokens), tmr.label)
    if args:
        # FIX: the original joined ``tokens`` again under the "Args" heading;
        # log the actual SQL arguments (stringified — they need not be str).
        write_file_contents('Args: '.ljust(25) + ', '.join(map(str, args)), tmr.label)
    if preds is None:
        # _where() returns None preds for an invalid search string.
        write_file_contents('Preds: '.ljust(25) + '', tmr.label)
        log_blank(tmr.label)
        return []

    order, rev = self._order(order)
    sql = self._query(preds, order)
    try:
        res = self.col.db.list(sql, *args)
    except Exception as ex:
        # invalid grouping
        log_error("Error with findCards Query %s: %s.\n%s" % (query, str(ex), [sql, args]), crosspost=tmr.label)
        return []
    if rev:
        res.reverse()
    write_file_contents("FINDCARDS DONE: ".ljust(25) + "%-5s --> %3d results" % (tmr.str_long, len(res)), tmr.label)
    log_blank(tmr.label)
    # (the original ended with an unreachable ``return _old(self, query, order)``
    # after this return; removed as dead code)
    return res
+
+
def anknotes_finder_query_wrap(self, preds=None, order=None, _old=None):
    """Wrapper ('around') for ``Finder._query``.

    Lets the stock implementation build the SQL, then — when the predicates
    reference the anknotes Evernote table — rewrites the SELECT to join that
    table with DISTINCT. Every query is logged. Returns None (after a debug
    dump) when called without ``_old`` or on a non-Finder instance.
    """
    if _old is None or not isinstance(self, Finder):
        log_dump([self, preds, order], 'Finder Query Wrap Error', 'finder\\error', crosspost_to_default=False)
        return
    built_sql = _old(self, preds, order)
    if "ank." in preds:
        # Predicate uses the 'ank' alias: add the join and de-duplicate rows.
        joined = built_sql.replace("select c.id", "select distinct c.id")
        built_sql = joined.replace("from cards c", "from cards c, %s ank" % TABLES.EVERNOTE.NOTES)
        write_file_contents('Custom anknotes finder SELECT query: \n%s' % built_sql, 'finder\\ank-query')
    elif TABLES.EVERNOTE.NOTES in preds:
        write_file_contents('Custom anknotes finder alternate query: \n%s' % built_sql, 'finder\\ank-query')
    else:
        write_file_contents("Anki finder query: %s" % built_sql[:100], 'finder\\query')
    return built_sql
+
+
def anknotes_search_hook(search):
    """Anki 'search' hook: register anknotes' custom search operators.

    Adds the 'edited' and 'anknotes' handlers to the Finder's operator map,
    leaving any pre-existing entries with those names untouched.
    """
    for key, handler in (('edited', _findEdited), ('anknotes', _findAnknotes)):
        search.setdefault(key, handler)
+
def reset_everything(upload=True):
    """Rebuild the See Also data from scratch, optionally uploading changes.

    Tooltips are suppressed for the duration and the previous tooltip state
    (None when the flag was never set) is restored afterwards.
    """
    previous_state = getattr(show_tooltip, 'enabled', None)
    show_tooltip.enabled = False
    ankDB().InitSeeAlso(True)
    menu.resync_with_local_db()
    menu.see_also(upload=upload)
    show_tooltip.enabled = previous_state
+
+
def anknotes_profile_loaded():
    """Anki 'profileLoaded' hook.

    Records the active profile name to disk, applies saved menu settings,
    optionally uploads validated notes / refreshes anknotes nids, and kicks
    off the auto-import timer. Developer-automation hooks run last.
    """
    # write_file_contents('%s: anknotes_profile_loaded' % __name__, 'load')
    last_profile_dir = os.path.dirname(FILES.USER.LAST_PROFILE_LOCATION)
    if not os.path.exists(last_profile_dir):
        os.makedirs(last_profile_dir)
    # Remember which profile was loaded last (Python 2 print-chevron syntax;
    # the trailing comma suppresses the newline).
    with open(FILES.USER.LAST_PROFILE_LOCATION, 'w+') as myFile:
        print>> myFile, mw.pm.name,
    # write_file_contents('%s: anknotes_profile_loaded: menu.anknotes_load_menu_settings' % __name__, 'load')
    menu.anknotes_load_menu_settings()
    if EVERNOTE.UPLOAD.VALIDATION.ENABLED and EVERNOTE.UPLOAD.VALIDATION.AUTOMATED:
        # write_file_contents('%s: anknotes_profile_loaded: menu.upload_validated_notes' % __name__, 'load')
        menu.upload_validated_notes(True)
    if ANKNOTES.UPDATE_DB_ON_START:
        # write_file_contents('%s: anknotes_profile_loaded: update_anknotes_nids' % __name__, 'load')
        update_anknotes_nids()
    # write_file_contents('%s: anknotes_profile_loaded: import_timer_toggle' % __name__, 'load')
    import_timer_toggle()
    if ANKNOTES.DEVELOPER_MODE.AUTOMATED:
        '''
        For testing purposes only!
        Add a function here and it will automatically run on profile load
        You must create the files 'anknotes.developer' and 'anknotes.developer.automate' in the /extra/dev/ folder
        '''
        # write_file_contents('%s: anknotes_profile_loaded: ANKNOTES.DEVELOPER_MODE.AUTOMATED' % __name__, 'load')
        # menu.lxml_test()
        # menu.see_also([8])
        # menu.see_also(upload=False)
        reset_everything(False)
        # menu.see_also(set(range(0,10)) - {3,4,8})
        # ankDB().InitSeeAlso(True)
        # menu.resync_with_local_db()
        # menu.see_also([1, 2, 6, 7, 9])
        # menu.lxml_test()
        # menu.see_also()
        # reset_everything()
        # menu.import_from_evernote(auto_page_callback=lambda: lambda: menu.see_also(3))
        # mw.progress.timer(20000, lambda : menu.find_deleted_notes(True), False)
        pass
+
def anknotes_scalar(self, *a, **kw):
    """Logging replacement for ``DB.scalar``.

    Executes the query, fetches the first row, and returns its first column
    (or None when no row matched). Every call — plus the previously recorded
    query, when anknotes_execute has stored one on ``ank_lastquery`` — is
    written to the sql\\scalar log; TypeErrors are logged and re-raised.
    """
    details = 'Call to DB.scalar():'
    if not isinstance(self, DB):
        details += '\n - Self: ' + pf(self)
    if a:
        details += '\n - Args: ' + pf(a)
    if kw:
        details += '\n - KWArgs: ' + pf(kw)
    last_query = ''
    if hasattr(self, 'ank_lastquery'):
        last_query = self.ank_lastquery
        # Truncate plain strings for the log; pretty-print anything else.
        if is_str_type(last_query):
            last_query = last_query[:50]
        else:
            last_query = pf(last_query)
    details += '\n - Last Query: ' + last_query
    write_file_contents(details + '\n', 'sql\\scalar')
    try:
        res = self.execute(*a, **kw)
    except TypeError as e:
        write_file_contents(" > ERROR with scalar while executing query: %s\n > LAST QUERY: %s" % (str(e), last_query), 'sql\\scalar', crosspost='sql\\scalar-error')
        raise
    if not isinstance(res, sqlite.Cursor):
        # Unexpected result type — log it before attempting the fetch anyway.
        write_file_contents(' > Cursor: %s' % pf(res), 'sql\\scalar')
    try:
        res = res.fetchone()
    except TypeError as e:
        write_file_contents(" > ERROR with scalar while fetching result: %s\n > LAST QUERY: %s" % (str(e), last_query), 'sql\\scalar', crosspost='sql\\scalar-error')
        raise
    write_file_contents('', 'sql\\scalar')
    if res:
        return res[0]
    return None
+
def anknotes_execute(self, sql, *a, **kw):
    """'before' wrapper for ``DB.execute``.

    Logs every execute call to the sql\\execute log and records the query on
    ``self.ank_lastquery`` so anknotes_scalar can report it later. The actual
    execution is performed by the wrapped original.
    """
    details = 'Call to DB.execute():'
    if not isinstance(self, DB):
        details += '\n - Self: ' + pf(self)
    if a:
        details += '\n - Args: ' + pf(a)
    if kw:
        details += '\n - KWArgs: ' + pf(kw)
    # Truncate plain strings for the log; pretty-print anything else.
    summary = sql[:50] if is_str_type(sql) else pf(sql)
    details += '\n - Query: ' + summary
    write_file_contents(details + '\n\n', 'sql\\execute')
    self.ank_lastquery = sql
+
def anknotes_onload():
    """Install all anknotes hooks and monkey-patches.

    Runs once at module import (see the call below). When running inside
    Anki, registers the profileLoaded hook and — depending on the
    ANKNOTES.HOOKS flags — patches the DB layer, the Finder search pipeline,
    and the browser sidebar, then installs the menu and preferences tab.
    """
    # write_file_contents('%s: anknotes_onload' % __name__, 'load')
    if in_anki():
        addHook("profileLoaded", anknotes_profile_loaded)
        if ANKNOTES.HOOKS.DB:
            # scalar is replaced outright rather than wrapped
            DB.scalar = anknotes_scalar  # wrap(DB.scalar, anknotes_scalar, "before")
            DB.execute = wrap(DB.execute, anknotes_execute, "before")
        if ANKNOTES.HOOKS.SEARCH:
            addHook("search", anknotes_search_hook)
            Finder._query = wrap(Finder._query, anknotes_finder_query_wrap, "around")
            Finder.findCards = wrap(Finder.findCards, anknotes_finder_findCards_wrap, "around")
            browser.Browser._systemTagTree = wrap(browser.Browser._systemTagTree, anknotes_browser_tagtree_wrap, "around")
        # write_file_contents('%s: anknotes_onload: anknotes_setup_menu' % __name__, 'load')
        menu.anknotes_setup_menu()
        Preferences.setupOptions = wrap(Preferences.setupOptions, settings.setup_evernote)
        # write_file_contents('%s: anknotes_onload: complete' % __name__, 'load')

# Module import side effect: install the hooks immediately.
anknotes_onload()
\ No newline at end of file
diff --git a/anknotes/_re.py b/anknotes/_re.py
new file mode 100644
index 0000000..151ddc9
--- /dev/null
+++ b/anknotes/_re.py
@@ -0,0 +1,277 @@
+"""Skeleton for 're' stdlib module."""
+
+
def compile(pattern, flags=0):
    """Compile a regular expression pattern, returning a pattern object.

    Skeleton stub (shadows the builtin name to mirror the ``re`` API);
    no runtime behavior.

    :type pattern: bytes | unicode
    :type flags: int
    :rtype: __Regex
    """
    pass
+
+
def search(pattern, string, flags=0):
    """Scan through string looking for a match, and return a corresponding
    match instance. Return None if no position in the string matches.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: __Match[T] | None
    """
    pass
+
+
def match(pattern, string, flags=0):
    """Matches zero or more characters at the beginning of the string.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: __Match[T] | None
    """
    pass
+
+
def split(pattern, string, maxsplit=0, flags=0):
    """Split string by the occurrences of pattern.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type maxsplit: int
    :type flags: int
    :rtype: list[T]
    """
    pass
+
+
def findall(pattern, string, flags=0):
    """Return a list of all non-overlapping matches of pattern in string.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: list[T]
    """
    pass
+
+
def finditer(pattern, string, flags=0):
    """Return an iterator over all non-overlapping matches for the pattern in
    string. For each match, the iterator returns a match object.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type string: T <= bytes | unicode
    :type flags: int
    :rtype: collections.Iterable[__Match[T]]
    """
    pass
+
+
def sub(pattern, repl, string, count=0, flags=0):
    """Return the string obtained by replacing the leftmost non-overlapping
    occurrences of pattern in string by the replacement repl.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type repl: bytes | unicode | collections.Callable
    :type string: T <= bytes | unicode
    :type count: int
    :type flags: int
    :rtype: T
    """
    pass
+
+
def subn(pattern, repl, string, count=0, flags=0):
    """Return the tuple (new_string, number_of_subs_made) found by replacing
    the leftmost non-overlapping occurrences of pattern with the
    replacement repl.

    Skeleton stub; no runtime behavior.

    :type pattern: bytes | unicode | __Regex
    :type repl: bytes | unicode | collections.Callable
    :type string: T <= bytes | unicode
    :type count: int
    :type flags: int
    :rtype: (T, int)
    """
    pass
+
+
def escape(string):
    """Escape all the characters in pattern except ASCII letters and numbers.

    Skeleton stub; no runtime behavior.
    (Docstring fix: the original wrote ``:type:`` for the return value.)

    :type string: T <= bytes | unicode
    :rtype: T
    """
    pass
+
+
class __Regex(object):
    """Mock class for a regular expression pattern object.

    Skeleton stub mirroring ``re`` pattern objects for IDE / type-checking
    support only; none of the methods have runtime behavior.
    """

    def __init__(self, flags, groups, groupindex, pattern):
        """Create a new pattern object.

        :type flags: int
        :type groups: int
        :type groupindex: dict[bytes | unicode, int]
        :type pattern: bytes | unicode
        """
        self.flags = flags
        self.groups = groups
        self.groupindex = groupindex
        self.pattern = pattern

    def search(self, string, pos=0, endpos=-1):
        """Scan through string looking for a match, and return a corresponding
        match instance. Return None if no position in the string matches.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: __Match[T] | None
        """
        pass

    def match(self, string, pos=0, endpos=-1):
        """Matches zero or more characters at the beginning of the string.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: __Match[T] | None
        """
        pass

    def split(self, string, maxsplit=0):
        """Split string by the occurrences of pattern.

        :type string: T <= bytes | unicode
        :type maxsplit: int
        :rtype: list[T]
        """
        pass

    def findall(self, string, pos=0, endpos=-1):
        """Return a list of all non-overlapping matches of pattern in string.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: list[T]
        """
        pass

    def finditer(self, string, pos=0, endpos=-1):
        """Return an iterator over all non-overlapping matches for the
        pattern in string. For each match, the iterator returns a
        match object.

        :type string: T <= bytes | unicode
        :type pos: int
        :type endpos: int
        :rtype: collections.Iterable[__Match[T]]
        """
        pass

    def sub(self, repl, string, count=0):
        """Return the string obtained by replacing the leftmost non-overlapping
        occurrences of pattern in string by the replacement repl.

        :type repl: bytes | unicode | collections.Callable
        :type string: T <= bytes | unicode
        :type count: int
        :rtype: T
        """
        pass

    def subn(self, repl, string, count=0):
        """Return the tuple (new_string, number_of_subs_made) found by replacing
        the leftmost non-overlapping occurrences of pattern with the
        replacement repl.

        :type repl: bytes | unicode | collections.Callable
        :type string: T <= bytes | unicode
        :type count: int
        :rtype: (T, int)
        """
        pass
+
+
class __Match(object):
    """Mock class for a match object.

    Skeleton stub mirroring ``re`` match objects for IDE / type-checking
    support only; none of the methods have runtime behavior.
    """

    def __init__(self, pos, endpos, lastindex, lastgroup, re, string):
        """Create a new match object.

        :type pos: int
        :type endpos: int
        :type lastindex: int | None
        :type lastgroup: int | bytes | unicode | None
        :type re: __Regex
        :type string: bytes | unicode
        :rtype: __Match[T]
        """
        self.pos = pos
        self.endpos = endpos
        self.lastindex = lastindex
        self.lastgroup = lastgroup
        self.re = re
        self.string = string

    def expand(self, template):
        """Return the string obtained by doing backslash substitution on the
        template string template.

        :type template: T
        :rtype: T
        """
        pass

    def group(self, *args):
        """Return one or more subgroups of the match.

        :rtype: T | tuple
        """
        pass

    def groups(self, default=None):
        """Return a tuple containing all the subgroups of the match, from 1 up
        to however many groups are in the pattern.

        :rtype: tuple
        """
        pass

    def groupdict(self, default=None):
        """Return a dictionary containing all the named subgroups of the match,
        keyed by the subgroup name.

        :rtype: dict[bytes | unicode, T]
        """
        pass

    def start(self, group=0):
        """Return the index of the start of the substring matched by group.

        :type group: int | bytes | unicode
        :rtype: int
        """
        pass

    def end(self, group=0):
        """Return the index of the end of the substring matched by group.

        :type group: int | bytes | unicode
        :rtype: int
        """
        pass

    def span(self, group=0):
        """Return a 2-tuple (start, end) for the substring matched by group.

        :type group: int | bytes | unicode
        :rtype: (int, int)
        """
        pass
diff --git a/anknotes/addict/__init__.py b/anknotes/addict/__init__.py
new file mode 100644
index 0000000..a22f2e8
--- /dev/null
+++ b/anknotes/addict/__init__.py
@@ -0,0 +1,8 @@
+from .addict import Dict
+
+__title__ = 'addict'
+__version__ = '0.4.0'
+__author__ = 'Mats Julian Olsen'
+__license__ = 'MIT'
+__copyright__ = 'Copyright 2014 Mats Julian Olsen'
+__all__ = ['Dict']
diff --git a/anknotes/addict/addict.py b/anknotes/addict/addict.py
new file mode 100644
index 0000000..7ccb63f
--- /dev/null
+++ b/anknotes/addict/addict.py
@@ -0,0 +1,368 @@
+from inspect import isgenerator
+import re
+import os
+import copy
+from anknotes.base import is_dict_type, item_to_list, is_seq_type
+
+
+class Dict(dict):
+ """
+ Dict is a subclass of dict, which allows you to get AND SET(!!)
+ items in the dict using the attribute syntax!
+
+ When you previously had to write:
+
+ my_dict = {'a': {'b': {'c': [1, 2, 3]}}}
+
+ you can now do the same simply by:
+
+ my_Dict = Dict()
+ my_Dict.a.b.c = [1, 2, 3]
+
+ Or for instance, if you'd like to add some additional stuff,
+ where you'd with the normal dict would write
+
+ my_dict['a']['b']['d'] = [4, 5, 6],
+
+ you may now do the AWESOME
+
+ my_Dict.a.b.d = [4, 5, 6]
+
+ instead. But hey, you can always use the same syntax as a regular dict,
+ however, this will not raise TypeErrors or AttributeErrors at any time
+ while you try to get an item. A lot like a defaultdict.
+
+ """
+
+ def __init__(self, *a, **kw):
+ """
+ If we're initialized with a dict, make sure we turn all the
+ subdicts into Dicts as well.
+
+ """
+ a = list(a)
+ mro = self._get_arg_(a, int, 'mro', kw)
+ pass # self.log_init('Dict', mro, a, kw)
+ self.update(*a, **kw)
+
+ def __setattr__(self, name, value):
+ """
+ setattr is called when the syntax a.b = 2 is used to set a value.
+
+ """
+ if hasattr(Dict, name):
+ raise AttributeError("'Dict' object attribute "
+ "'{0}' is read-only".format(name))
+ else:
+ self[name] = value
+
+ def __setitem__(self, name, value):
+ """
+ This is called when trying to set a value of the Dict using [].
+ E.g. some_instance_of_Dict['b'] = val. If 'val
+
+ """
+ value = self._hook(value)
+ super(Dict, self).__setitem__(name, value)
+
+ def _hook(self, item):
+ """
+ Called to ensure that each dict-instance that are being set
+ is a addict Dict. Recurses.
+
+ """
+ if isinstance(item, dict):
+ return self._new_instance_(item)
+ if isinstance(item, (list, tuple)):
+ return item.__class__(self._hook(elem) for elem in item)
+ return item
+
+ def __getattr__(self, item):
+ if item not in self and item in dir(self):
+ return super(Dict, self).__getattr__(item)
+ return self.__getitem__(item)
+
+ def _new_instance_(self, *a, **kw):
+ return (self.__class__.mro()[self._mro_offset_] if self._is_obj_attr_('_mro_offset_') else self.__class__)(*a, **kw)
+
+ def __getitem__(self, name):
+ """
+ This is called when the Dict is accessed by []. E.g.
+ some_instance_of_Dict['a'];
+ If the name is in the dict, we return it. Otherwise we set both
+ the attr and item to a new instance of Dict.
+
+ """
+ if name not in self:
+ self[name] = self._new_instance_()
+ return super(Dict, self).__getitem__(name)
+
+ def __delattr__(self, name):
+ """ Is invoked when del some_addict.b is called. """
+ del self[name]
+
+ _re_pattern = re.compile('[a-zA-Z_][a-zA-Z0-9_]*')
+
+ # def log(self, str_, method, do_print=True, prefix=''):
+ # cls = self.__class__
+ # str_lbl = self.label.full if self.label else ''
+ # if str_lbl:
+ # str_lbl += ': '
+ # str_ = prefix + '%17s %-20s %s' % ('<%s>' % cls.__name__, str_lbl, str_)
+ # if do_print:
+ # print str_
+ # write_file_contents(str_, 'Dicts\\%s\\%s' % (cls.__name__, method))
+
+ # def log_action(self, method, action, name, value, key=None, via=None, extra='', log_self=False):
+ # if key in ['_my_attrs_','_override_default_']:
+ # return
+ # if extra:
+ # extra += ' '
+ # type = ('<%s>' % value.__class__.__name__).center(10)
+ # log_str = action.ljust(12) + ' '
+ # log_str += name.ljust(12) + ' '
+ # log_str += ('via '+via if via else '').ljust(10) + ' '
+ # log_str += ('for `%s`' % key if key else '').ljust(25) + ' '
+ # log_str += 'to %10s%s %s' % (extra, type, str(value))
+ # if log_self:
+ # log_str += ' \n\n Self: ' + repr(self)
+ # pass # self.log(log_str, method);
+
+ # def log_init(self, type, mro, a, kw):
+ # cls = self.__class__
+ # mro_name = cls.mro()[mro].__name__
+ # mro_name = (':' + mro_name) if mro_name != cls.__name__ and mro_name != type else ''
+ # log_str = "Init: %s%s #%d" % (type, mro_name, mro)
+ # log_str += "\n Args: %s" % a if a else ""
+ # log_str += "\n KWArgs: %s" % kw if kw else ""
+ # pass # self.log(log_str + '\n', '__init__', prefix='-'*40+'\n', do_print=False)
+
+ # def clear_logs(self):
+ # name=self.__class__.__name__
+ # reset_logs('Dicts' + os.path.sep + name, self.make_banner(name))
+
+ @staticmethod
+ def get_default_value(cls, default=None):
+ if default is not None:
+ return default
+ if cls is str or cls is unicode:
+ return ''
+ elif cls is int:
+ return 0
+ elif cls is bool:
+ return False
+ return None
+
+ def _get_arg_(self, a, cls=None, key=None, kw=None, default=None):
+ if cls is None:
+ cls = (str, unicode)
+ if a and isinstance(a[0], cls):
+ val = a[0]
+ #del a[0]
+ elif kw and key in kw:
+ val = kw[key]
+ del kw[key]
+ else:
+ val = self.get_default_value(cls, default)
+ return val
+
+ def _key_transform_(self, key, keys=None, all=False, attrs=False):
+ return key
+
+ def _key_transform_all_(self, key, keys=None):
+ return self._key_transform_(key, keys, all=True)
+
+ def _key_transform_attrs_(self, key, keys=None):
+ return self._key_transform_(key, keys, attrs=True)
+
+ def __contains__(self, item):
+ key = self._key_transform_(item)
+ return key in self._dict_keys_()
+
+ def _is_obj_attr_(self, key):
+ keys = self._obj_attrs_()
+ return self._key_transform_(key, keys) in keys
+
+ def _dict_keys_(self):
+ dict_keys = []
+ for k in self.keys():
+ if isinstance(k, str):
+ m = self._re_pattern.match(k)
+ if m:
+ dict_keys.append(m.string)
+ return dict_keys
+
+ def _obj_attrs_(self):
+ return list(dir(self.__class__))
+
+ def __dir__(self):
+ """
+ Return a list of addict object attributes.
+ This includes key names of any dict entries, filtered to the subset of
+ valid attribute names (e.g. alphanumeric strings beginning with a
+ letter or underscore). Also includes attributes of parent dict class.
+ """
+ return self._dict_keys_() + self._obj_attrs_()
+
+ def _ipython_display_(self):
+ print(str(self)) # pragma: no cover
+
+ def _repr_html_(self):
+ return str(self)
+
+ def prune(self, prune_zero=False, prune_empty_list=True):
+ """
+ Removes all empty Dicts and falsy stuff inside the Dict.
+ E.g
+ >>> a = Dict()
+ >>> a.b.c.d
+ {}
+ >>> a.a = 2
+ >>> a
+ {'a': 2, 'b': {'c': {'d': {}}}}
+ >>> a.prune()
+ >>> a
+ {'a': 2}
+
+ Set prune_zero=True to remove 0 values
+ E.g
+ >>> a = Dict()
+ >>> a.b.c.d = 0
+ >>> a.prune(prune_zero=True)
+ >>> a
+ {}
+
+ Set prune_empty_list=False to have them persist
+ E.g
+ >>> a = Dict({'a': []})
+ >>> a.prune()
+ >>> a
+ {}
+ >>> a = Dict({'a': []})
+ >>> a.prune(prune_empty_list=False)
+ >>> a
+ {'a': []}
+ """
+ for key, val in list(self.items()):
+ if ((not val) and ((val != 0) or prune_zero) and
+ not isinstance(val, list)):
+ del self[key]
+ elif isinstance(val, Dict):
+ val.prune(prune_zero, prune_empty_list)
+ if not val:
+ del self[key]
+ elif isinstance(val, (list, tuple)):
+ new_iter = self._prune_iter(val, prune_zero, prune_empty_list)
+ if (not new_iter) and prune_empty_list:
+ del self[key]
+ else:
+ if isinstance(val, tuple):
+ new_iter = tuple(new_iter)
+ self[key] = new_iter
+
+ @classmethod
+ def _prune_iter(cls, some_iter, prune_zero=False, prune_empty_list=True):
+ new_iter = []
+ for item in some_iter:
+ if item == 0 and prune_zero:
+ continue
+ elif isinstance(item, Dict):
+ item.prune(prune_zero, prune_empty_list)
+ if item:
+ new_iter.append(item)
+ elif isinstance(item, (list, tuple)):
+ new_item = item.__class__(
+ cls._prune_iter(item, prune_zero, prune_empty_list))
+ if new_item or not prune_empty_list:
+ new_iter.append(new_item)
+ else:
+ new_iter.append(item)
+ return new_iter
+
+ def to_dict(self):
+ """ Recursively turn your addict Dicts into dicts. """
+ base = {}
+ cls = self.__class__
+ for key, value in self.items():
+ if isinstance(value, cls):
+ base[key] = value.to_dict()
+ elif isinstance(value, (list, tuple)):
+ base[key] = value.__class__(
+ item.to_dict() if isinstance(item, cls) else
+ item for item in value)
+ else:
+ base[key] = value
+ return base
+
+ def copy(self):
+ """
+ Return a disconnected deep copy of self. Children of type Dict, list
+ and tuple are copied recursively while values that are instances of
+ other mutable objects are not copied.
+
+ """
+ return self._new_instance_(self.to_dict())
+
+ def __deepcopy__(self, memo):
+ """ Return a disconnected deep copy of self. """
+
+ y = self.__class__()
+ memo[id(self)] = y
+ for key, value in self.items():
+ y[copy.deepcopy(key, memo)] = copy.deepcopy(value, memo)
+ return y
+
+ def initialize_keys(self, arg, split_chr='|'):
+ """
+ Initializes keys from string or sequence.
+ From string:
+ 1) String is converted to list, split by 'split_chr' argument
+ From list:
+ 2) If list item has two subitems, it will be treated as a key-value pair
+ E.g: [['key', 'value'], ['key2', 'value2']] will set keys with corresponding values
+ 3) Otherwise, it will be treated as a list of keys
+ E.g.: [['key1', 'key2', 'key3']] will instantiate keys as new Dicts
+ 4) If list item is not a sequence, it will be converted to a list as per Example 1
+ E.g.: ['key1', 'key2', 'key3'] will act similarly to Example 3
+ 5) Exception: If list item is a dict, it will be handled via update.update_dict
+ """
+ if not is_seq_type(arg):
+ arg = item_to_list(arg, split_chr=split_chr)
+ for items in arg:
+ if is_dict_type(items):
+ self.update(items)
+ continue
+ if not is_seq_type(items):
+ items = item_to_list(items, split_chr=split_chr)
+ if len(items) is 1:
+ self[items[0]]
+ elif len(items) is 2:
+ self[items[0]] = items[1]
+ else:
+ self.update_seq(items)
+
+ def update(self, *a, **kw):
+ """ Update self with dict, sequence, or kwargs """
+ def update_dict(d):
+ """ Recursively merge d into self. """
+ for k, v in d.items():
+ if k in self and is_dict_type(self[k], v):
+ self[k].update(v)
+ else:
+ self[k] = v
+
+ # Begin update()
+ for arg in a:
+ if not arg:
+ continue
+ elif isinstance(arg, dict):
+ update_dict(arg)
+ elif isinstance(arg, tuple) and len(arg) is 2 and not isinstance(arg[0], tuple):
+ self[arg[0]] = arg[1]
+ elif is_seq_type(arg):
+ self.initialize_keys(arg)
+ else:
+ raise TypeError("Dict does not understand "
+ "{0} types".format(arg.__class__))
+ update_dict(kw)
+
diff --git a/anknotes/ankEvernote.py b/anknotes/ankEvernote.py
new file mode 100644
index 0000000..28c078c
--- /dev/null
+++ b/anknotes/ankEvernote.py
@@ -0,0 +1,603 @@
+# -*- coding: utf-8 -*-
+### Python Imports
+import socket
+import stopwatch
+from datetime import datetime, timedelta
+from StringIO import StringIO
+
+# try:
+# from lxml import etree
+# eTreeImported = True
+# except ImportError:
+# eTreeImported = False
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+
+### Anknotes Imports
+from anknotes.shared import *
+from anknotes.error import *
+from anknotes.imports import in_anki
+from anknotes.base import is_str, encode
+
+### Anki Imports
+if in_anki():
+ ### Anknotes Class Imports
+ from anknotes.EvernoteNoteFetcher import EvernoteNoteFetcher
+ from anknotes.EvernoteNotePrototype import EvernoteNotePrototype
+
+ ### Evernote Imports
+ from anknotes.evernote.edam.type.ttypes import Note as EvernoteNote
+ from anknotes.evernote.edam.error.ttypes import EDAMSystemException, EDAMUserException, EDAMNotFoundException
+ from anknotes.evernote.api.client import EvernoteClient
+
+ ### Anki Imports
+ from aqt.utils import openLink, getText, showInfo
+ from aqt import mw
+
+
+### Anki Imports
+# import anki
+# import aqt
+# from anki.hooks import wrap, addHook
+# from aqt.preferences import Preferences
+# from aqt.utils import getText, openLink, getOnlyText
+# from aqt.qt import QLineEdit, QLabel, QVBoxLayout, QHBoxLayout, QGroupBox, SIGNAL, QCheckBox, \
+# QComboBox, QSpacerItem, QSizePolicy, QWidget, QSpinBox, QFormLayout, QGridLayout, QFrame, QPalette, \
+# QRect, QStackedLayout, QDateEdit, QDateTimeEdit, QTimeEdit, QDate, QDateTime, QTime, QPushButton, QIcon, QMessageBox, QPixmap, QMenu, QAction
+# from aqt import mw
+
+etree = None
+
+class Evernote(object):
+ metadata = {}
+ """:type : dict[str, evernote.edam.type.ttypes.Note]"""
+ notebook_data = {}
+ """:type : dict[str, anknotes.structs.EvernoteNotebook]"""
+ tag_data = {}
+ """:type : dict[str, anknotes.structs.EvernoteTag]"""
+ DTD = None
+ __hasValidator = None
+ token = None
+ client = None
+ """:type : EvernoteClient """
+
+    def hasValidator(self):
+        """Lazily probe for the lxml-based ENML validator; return the cached result.
+
+        On first call, import_etree() is attempted and its outcome cached in
+        self.__hasValidator; on success the module-global `etree` is rebound
+        from anknotes.imports so loadDTD/validateNoteBody can use it.
+        NOTE(review): callers elsewhere reference `self.hasValidator` without
+        parentheses (always truthy) — confirm those call sites.
+        """
+        global etree
+        if self.__hasValidator is None:
+            self.__hasValidator = import_etree()
+            if self.__hasValidator:
+                # Binds the *global* `etree` (declared above), not a local.
+                from anknotes.imports import etree
+        return self.__hasValidator
+
+ def __init__(self):
+ self.tag_data = {}
+ self.notebook_data = {}
+ self.noteStore = None
+ self.getNoteCount = 0
+ # self.hasValidator = eTreeImported
+ if ankDBIsLocal():
+ log("Skipping Evernote client load (DB is Local)", 'client')
+ return
+ self.setup_client()
+
+ def setup_client(self):
+ auth_token = SETTINGS.EVERNOTE.AUTH_TOKEN.fetch()
+ if not auth_token:
+ # First run of the Plugin we did not save the access key yet
+ secrets = {'holycrepe': '36f46ea5dec83d4a', 'scriptkiddi-2682': '965f1873e4df583c'}
+ client = EvernoteClient(
+ consumer_key=EVERNOTE.API.CONSUMER_KEY,
+ consumer_secret=secrets[EVERNOTE.API.CONSUMER_KEY],
+ sandbox=EVERNOTE.API.IS_SANDBOXED
+ )
+ request_token = client.get_request_token('https://fap-studios.de/anknotes/index.html')
+ url = client.get_authorize_url(request_token)
+ showInfo("We will open a Evernote Tab in your browser so you can allow access to your account")
+ openLink(url)
+ oauth_verifier = getText(prompt="Please copy the code that showed up, after allowing access, in here")[0]
+ auth_token = client.get_access_token(
+ request_token.get('oauth_token'),
+ request_token.get('oauth_token_secret'),
+ oauth_verifier)
+ SETTINGS.EVERNOTE.AUTH_TOKEN.save(auth_token)
+ else:
+ client = EvernoteClient(token=auth_token, sandbox=EVERNOTE.API.IS_SANDBOXED)
+ self.token = auth_token
+ self.client = client
+ log("Set up Evernote Client", 'client')
+
+ def initialize_note_store(self):
+ if self.noteStore:
+ return EvernoteAPIStatus.Success
+ api_action_str = u'trying to initialize the Evernote Note Store.'
+ log_api("get_note_store")
+ if not self.client:
+ log_error(
+ "Client does not exist for some reason. Did we not initialize Evernote Class? Current token: " + str(
+ self.token))
+ self.setup_client()
+ try:
+ self.noteStore = self.client.get_note_store()
+ except EDAMSystemException as e:
+ if not HandleEDAMRateLimitError(e, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return EvernoteAPIStatus.RateLimitError
+ except socket.error as v:
+ if not HandleSocketError(v, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return EvernoteAPIStatus.SocketError
+ return EvernoteAPIStatus.Success
+
+ def loadDTD(self):
+ if self.DTD:
+ return
+ timerInterval = stopwatch.Timer()
+ log("Loading ENML DTD", "lxml", timestamp=False, do_print=True)
+ self.DTD = etree.DTD(FILES.ANCILLARY.ENML_DTD)
+ log("DTD Loaded in %s\n" % str(timerInterval), "lxml", timestamp=False, do_print=True)
+ log(' > Note Validation: ENML DTD Loaded in %s' % str(timerInterval))
+ del timerInterval
+
+ def validateNoteBody(self, noteBody, title="Note Body"):
+ """
+
+ :param noteBody:
+ :type noteBody : str | unicode
+ :param title:
+ :return:
+ :rtype : (EvernoteAPIStatus, [str|unicode])
+ """
+ self.loadDTD()
+ noteBody = noteBody.replace('"http://xml.evernote.com/pub/enml2.dtd"',
+ '"%s"' % convert_filename_to_local_link(FILES.ANCILLARY.ENML_DTD))
+ parser = etree.XMLParser(dtd_validation=True, attribute_defaults=True)
+ try:
+ root = etree.fromstring(noteBody, parser)
+ except Exception as e:
+ log_str = "XML Loading of %s failed.\n - Error Details: %s" % (title, str(e))
+ log(log_str, "lxml", timestamp=False, do_print=True)
+ log_error(log_str, False)
+ return EvernoteAPIStatus.UserError, [log_str]
+ try:
+ success = self.DTD.validate(root)
+ except Exception as e:
+ log_str = "DTD Validation of %s failed.\n - Error Details: %s" % (title, str(e))
+ log(log_str, "lxml", timestamp=False, do_print=True)
+ log_error(log_str, False)
+ return EvernoteAPIStatus.UserError, [log_str]
+ log("Validation %-9s for %s" % ("Succeeded" if success else "Failed", title), "lxml", timestamp=False,
+ do_print=True)
+ errors = [str(x) for x in self.DTD.error_log.filter_from_errors()]
+ if not success:
+ log_str = "DTD Validation Errors for %s: \n%s\n" % (title, str(errors))
+ log(log_str, "lxml", timestamp=False)
+ log_error(log_str, False)
+ return EvernoteAPIStatus.Success if success else EvernoteAPIStatus.UserError, errors
+
+ def validateNoteContent(self, content, title="Note Contents"):
+ """
+
+ :param content: Valid ENML without the tags. Will be processed by makeNoteBody
+ :type content : str|unicode
+ :return:
+ :rtype : (EvernoteAPIStatus, [str|unicode])
+ """
+ return self.validateNoteBody(self.makeNoteBody(content), title)
+
+ def updateNote(self, guid, noteTitle, noteBody, tagNames=None, parentNotebook=None, noteType=None, resources=None):
+ """
+ Update a Note instance with title and body
+ Send Note object to user's account
+ :rtype : (EvernoteAPIStatus, evernote.edam.type.ttypes.Note)
+ :returns Status and Note
+ """
+ if resources is None:
+ resources = []
+ return self.makeNote(noteTitle, noteBody, tagNames=tagNames, parentNotebook=parentNotebook, noteType=noteType,
+ resources=resources,
+ guid=guid)
+
+ @staticmethod
+ def makeNoteBody(content, resources=None, encode=True):
+ ## Build body of note
+ if resources is None:
+ resources = []
+ nBody = content
+ if not nBody.startswith(""
+ nBody += ""
+ nBody += "%s" % content + ""
+ if encode:
+ nBody = encode(nBody)
+ return nBody
+
+ @staticmethod
+ def addNoteToMakeNoteQueue(noteTitle, noteContents, tagNames=list(), parentNotebook=None, resources=None,
+ noteType=None,
+ guid=None):
+ db = ankDB(TABLES.NOTE_VALIDATION_QUEUE)
+ if not noteType:
+ noteType = 'Unspecified'
+ if resources is None:
+ resources = []
+ args = [noteType]
+ sql = "FROM {t} WHERE noteType = ? AND "
+ if guid:
+ sql += 'guid = ?'
+ args.append(guid)
+ else:
+ sql += 'title = ? AND contents = ?'
+ args += [noteTitle, noteContents]
+ statuses = db.all('SELECT validation_status ' + sql, args)
+ if statuses:
+ if str(statuses[0]['validation_status']) == '1':
+ return EvernoteAPIStatus.Success
+ db.execute("DELETE " + sql, args)
+ db.execute(
+ "INSERT INTO {t}(guid, title, contents, tagNames, notebookGuid, noteType) VALUES(?, ?, ?, ?, ?, ?)",
+ guid, noteTitle, noteContents, ','.join(tagNames), parentNotebook, noteType)
+ return EvernoteAPIStatus.RequestQueued
+
+ def makeNote(self, noteTitle=None, noteContents=None, tagNames=None, parentNotebook=None, resources=None,
+ noteType=None, guid=None,
+ validated=None, enNote=None):
+ """
+ Create or Update a Note instance with title and body
+ Send Note object to user's account
+ :type noteTitle: str
+ :param noteContents: Valid ENML without the tags. Will be processed by makeNoteBody
+ :type enNote : EvernoteNotePrototype
+ :rtype : (EvernoteAPIStatus, EvernoteNote)
+ :returns Status and Note
+ """
+ if tagNames is None:
+ tagNames = []
+ if enNote:
+ guid, noteTitle, noteContents, tagNames, parentNotebook = enNote.Guid, enNote.FullTitle, enNote.Content, enNote.Tags, enNote.NotebookGuid or parentNotebook
+ if resources is None:
+ resources = []
+ callType = "create"
+ validation_status = EvernoteAPIStatus.Uninitialized
+ if validated is None:
+ if not EVERNOTE.UPLOAD.VALIDATION.ENABLED:
+ validated = True
+ else:
+ validation_status = self.addNoteToMakeNoteQueue(noteTitle, noteContents, tagNames, parentNotebook,
+ resources, guid)
+ if not validation_status.IsSuccess and not self.hasValidator:
+ return validation_status, None
+ log('%s%s: %s: ' % ('+VALIDATOR ' if self.hasValidator else '' + noteType, str(validation_status), noteTitle),
+ 'validation')
+ ourNote = EvernoteNote()
+ ourNote.title = encode(noteTitle)
+ if guid:
+ callType = "update"; ourNote.guid = guid
+
+ ## Build body of note
+ nBody = self.makeNoteBody(noteContents, resources)
+ if validated is not True and not validation_status.IsSuccess:
+ status, errors = self.validateNoteBody(nBody, ourNote.title)
+ assert isinstance(status, EvernoteAPIStatus)
+ if not status.IsSuccess:
+ return status, None
+ ourNote.content = nBody
+
+ notestore_status = self.initialize_note_store()
+ if not notestore_status.IsSuccess:
+ return notestore_status, None
+
+ while '' in tagNames: tagNames.remove('')
+ if tagNames:
+ if EVERNOTE.API.IS_SANDBOXED and not '#Sandbox' in tagNames:
+ tagNames.append("#Sandbox")
+ ourNote.tagNames = tagNames
+
+ ## parentNotebook is optional; if omitted, default notebook is used
+ if parentNotebook:
+ if hasattr(parentNotebook, 'guid'):
+ ourNote.notebookGuid = parentNotebook.guid
+ elif hasattr(parentNotebook, 'Guid'):
+ ourNote.notebookGuid = parentNotebook.Guid
+ elif is_str(parentNotebook):
+ ourNote.notebookGuid = parentNotebook
+
+ ## Attempt to create note in Evernote account
+
+ api_action_str = u'trying to %s a note' % callType
+ log_api(callType + "Note", "'%s'" % noteTitle)
+ try:
+ note = getattr(self.noteStore, callType + 'Note')(self.token, ourNote)
+ except EDAMSystemException as e:
+ if not HandleEDAMRateLimitError(e, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return EvernoteAPIStatus.RateLimitError, None
+ except socket.error as v:
+ if not HandleSocketError(v, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return EvernoteAPIStatus.SocketError, None
+ except EDAMUserException as edue:
+ ## Something was wrong with the note data
+ ## See EDAMErrorCode enumeration for error code explanation
+ ## http://dev.evernote.com/documentation/reference/Errors.html#Enum_EDAMErrorCode
+ print "EDAMUserException:", edue
+ log_error("-" * 50, crosspost_to_default=False)
+ log_error("EDAMUserException: " + str(edue), crosspost='api')
+ log_error(str(ourNote.tagNames), crosspost_to_default=False)
+ log_error(str(ourNote.content), crosspost_to_default=False)
+ log_error("-" * 50 + "\r\n", crosspost_to_default=False)
+ if EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return EvernoteAPIStatus.UserError, None
+ except EDAMNotFoundException as ednfe:
+ print "EDAMNotFoundException:", ednfe
+ log_error("-" * 50, crosspost_to_default=False)
+ log_error("EDAMNotFoundException: " + str(ednfe), crosspost='api')
+ if callType is "update":
+ log_error('GUID: ' + str(ourNote.guid), crosspost_to_default=False)
+ if ourNote.notebookGuid:
+ log_error('Notebook GUID: ' + str(ourNote.notebookGuid), crosspost_to_default=False)
+ log_error("-" * 50 + "\r\n", crosspost_to_default=False)
+ if EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return EvernoteAPIStatus.NotFoundError, None
+ except Exception as e:
+ print "Unknown Exception:", e
+ log_error("-" * 50, crosspost_to_default=False)
+ log_error("Unknown Exception: " + str(e))
+ log_error(str(ourNote.tagNames), crosspost_to_default=False)
+ log_error(str(ourNote.content), crosspost_to_default=False)
+ log_error("-" * 50 + "\r\n", crosspost_to_default=False)
+ # return EvernoteAPIStatus.UnhandledError, None
+ raise
+ # noinspection PyUnboundLocalVariable
+ note.content = nBody
+ return EvernoteAPIStatus.Success, note
+
+ def create_evernote_notes(self, evernote_guids=None, use_local_db_only=False):
+ """
+ Create EvernoteNote objects from Evernote GUIDs using EvernoteNoteFetcher.getNote().
+ Will prematurely return if fetcher.getNote fails
+
+ :rtype : EvernoteNoteFetcherResults
+ :param evernote_guids:
+ :param use_local_db_only: Do not initiate API calls
+ :return: EvernoteNoteFetcherResults
+ """
+ if not hasattr(self, 'evernote_guids') or evernote_guids:
+ self.evernote_guids = evernote_guids
+ if not use_local_db_only:
+ self.check_ancillary_data_up_to_date()
+ action_str_base = 'Create'
+ action_str = 'Creation Of'
+ info = stopwatch.ActionInfo(action_str, 'Evernote Notes', report_if_empty=False)
+ tmr = stopwatch.Timer(evernote_guids, info=info,
+ label='Add\\Evernote-%sNotes' % (action_str_base))
+ fetcher = EvernoteNoteFetcher(self, use_local_db_only=use_local_db_only)
+ if not evernote_guids:
+ fetcher.results.Status = EvernoteAPIStatus.EmptyRequest; return fetcher.results
+ if in_anki():
+ fetcher.evernoteQueryTags = SETTINGS.EVERNOTE.QUERY.TAGS.fetch().replace(',', ' ').split()
+ fetcher.keepEvernoteTags = SETTINGS.ANKI.TAGS.KEEP_TAGS.fetch()
+ fetcher.deleteQueryTags = SETTINGS.ANKI.TAGS.DELETE_EVERNOTE_QUERY_TAGS.fetch()
+ fetcher.tagsToDelete = SETTINGS.ANKI.TAGS.TO_DELETE.fetch().replace(',', ' ').split()
+ for evernote_guid in self.evernote_guids:
+ if not fetcher.getNote(evernote_guid):
+ return fetcher.results
+ tmr.reportSuccess()
+ tmr.step(fetcher.result.Note.FullTitle)
+ tmr.Report()
+ return fetcher.results
+
+ def check_ancillary_data_up_to_date(self):
+ new_tags = 0 if self.check_tags_up_to_date() else self.update_tags_database(
+ "Tags were not up to date when checking ancillary data")
+ new_nbs = 0 if self.check_notebooks_up_to_date() else self.update_notebooks_database()
+ self.report_ancillary_data_results(new_tags, new_nbs, 'Forced ')
+
+ def update_ancillary_data(self):
+ new_tags = self.update_tags_database("Manual call to update ancillary data")
+ new_nbs = self.update_notebooks_database()
+ self.report_ancillary_data_results(new_tags, new_nbs, 'Manual ', report_blank=True)
+
+ @staticmethod
+ def report_ancillary_data_results(new_tags, new_nbs, title_prefix='', report_blank=False):
+ str_ = ''
+ if new_tags is 0 and new_nbs is 0:
+ if not report_blank:
+ return
+ str_ = 'No new tags or notebooks found'
+ elif new_tags is None and new_nbs is None:
+ str_ = 'Error downloading ancillary data'
+ elif new_tags is None:
+ str_ = 'Error downloading tags list, and '
+ elif new_nbs is None:
+ str_ = 'Error downloading notebooks list, and '
+
+ if new_tags > 0 and new_nbs > 0:
+ str_ = '%d new tag%s and %d new notebook%s found' % (
+ new_tags, '' if new_tags is 1 else 's', new_nbs, '' if new_nbs is 1 else 's')
+ elif new_nbs > 0:
+ str_ += '%d new notebook%s found' % (new_nbs, '' if new_nbs is 1 else 's')
+ elif new_tags > 0:
+ str_ += '%d new tag%s found' % (new_tags, '' if new_tags is 1 else 's')
+ show_tooltip("%sUpdate of ancillary data complete: " % title_prefix + str_, do_log=True)
+
+ def set_notebook_data(self):
+ if not hasattr(self, 'notebook_data') or not self.notebook_data or len(self.notebook_data.keys()) == 0:
+ self.notebook_data = {x['guid']: EvernoteNotebook(x) for x in
+ ankDB().execute("SELECT guid, name FROM {nb} WHERE 1")}
+
+ def check_notebook_metadata(self, notes):
+ """
+ :param notes:
+ :type : list[EvernoteNotePrototype]
+ :return:
+ """
+ self.set_notebook_data()
+ for note in notes:
+ assert (isinstance(note, EvernoteNotePrototype))
+ if note.NotebookGuid in self.notebook_data:
+ continue
+ new_nbs = self.update_notebooks_database()
+ if note.NotebookGuid in self.notebook_data:
+ log(
+ "Missing notebook GUID %s for note %s when checking notebook metadata. Notebook was found after updating Anknotes' notebook database." + '' if new_nbs < 1 else ' In total, %d new notebooks were found.' % new_nbs)
+ continue
+ log_error("FATAL ERROR: Notebook GUID %s for Note %s: %s does not exist on Evernote servers" % (
+ note.NotebookGuid, note.Guid, note.Title))
+ raise EDAMNotFoundException()
+ return True
+
+ def check_notebooks_up_to_date(self):
+ for evernote_guid in self.evernote_guids:
+ note_metadata = self.metadata[evernote_guid]
+ notebookGuid = note_metadata.notebookGuid
+ if not notebookGuid:
+ log_error(" > Notebook check: Unable to find notebook guid for '%s'. Returned '%s'. Metadata: %s" % (
+ evernote_guid, str(notebookGuid), str(note_metadata)), crosspost_to_default=False)
+ elif notebookGuid not in self.notebook_data:
+ notebook = EvernoteNotebook(fetch_guid=notebookGuid)
+ if not notebook.success:
+ log(" > Notebook check: Missing notebook guid '%s'. Will update with an API call." % notebookGuid)
+ return False
+ self.notebook_data[notebookGuid] = notebook
+ return True
+
+ def update_notebooks_database(self):
+ notestore_status = self.initialize_note_store()
+ if not notestore_status.IsSuccess:
+ return None # notestore_status
+ api_action_str = u'trying to update Evernote notebooks.'
+ log_api("listNotebooks")
+ try:
+ notebooks = self.noteStore.listNotebooks(self.token)
+ """: type : list[evernote.edam.type.ttypes.Notebook] """
+ except EDAMSystemException as e:
+ if not HandleEDAMRateLimitError(e, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return None
+ except socket.error as v:
+ if not HandleSocketError(v, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return None
+ data = []
+ self.notebook_data = {}
+ for notebook in notebooks:
+ self.notebook_data[notebook.guid] = {"stack": notebook.stack, "name": notebook.name}
+ data.append(
+ [notebook.guid, notebook.name, notebook.updateSequenceNum, notebook.serviceUpdated, notebook.stack])
+ db = ankDB(TABLES.EVERNOTE.NOTEBOOKS)
+ old_count = db.count()
+ db.drop(db.table)
+ db.recreate()
+ # log_dump(data, 'update_notebooks_database table data', crosspost_to_default=False)
+ db.executemany(
+ "INSERT INTO `{t}`(`guid`,`name`,`updateSequenceNum`,`serviceUpdated`, `stack`) VALUES (?, ?, ?, ?, ?)",
+ data)
+ db.commit()
+ return len(self.notebook_data) - old_count
+
+ def update_tags_database(self, reason_str=''):
+ if hasattr(self, 'LastTagDBUpdate') and datetime.now() - self.LastTagDBUpdate < timedelta(minutes=15):
+ return None
+ self.LastTagDBUpdate = datetime.now()
+ notestore_status = self.initialize_note_store()
+ if not notestore_status.IsSuccess:
+ return None # notestore_status
+ api_action_str = u'trying to update Evernote tags.'
+ log_api("listTags" + (': ' + reason_str) if reason_str else '')
+ try:
+ tags = self.noteStore.listTags(self.token)
+ """: type : list[evernote.edam.type.ttypes.Tag] """
+ except EDAMSystemException as e:
+ if not HandleEDAMRateLimitError(e, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return None
+ except socket.error as v:
+ if not HandleSocketError(v, api_action_str) or EVERNOTE.API.DEBUG_RAISE_ERRORS:
+ raise
+ return None
+ data = []
+ self.tag_data = {}
+ enTag = None
+ for tag in tags:
+ enTag = EvernoteTag(tag)
+ self.tag_data[enTag.Guid] = enTag
+ data.append(enTag.items())
+ if not enTag:
+ return None
+ db = ankDB(TABLES.EVERNOTE.TAGS)
+ old_count = db.count()
+ db.drop(db.table)
+ db.recreate()
+ db.executemany(enTag.sqlUpdateQuery(), data)
+ db.commit()
+ return len(self.tag_data) - old_count
+
+ def set_tag_data(self):
+ if not hasattr(self, 'tag_data') or not self.tag_data or len(self.tag_data.keys()) == 0:
+ self.tag_data = {x['guid']: EvernoteTag(x) for x in ankDB().execute("SELECT guid, name FROM {tt} WHERE 1")}
+
+ def get_missing_tags(self, current_tags, from_guids=True):
+ if isinstance(current_tags, list):
+ current_tags = set(current_tags)
+ self.set_tag_data()
+ all_tags = set(self.tag_data.keys() if from_guids else [v.Name for k, v in self.tag_data.items()])
+ missing_tags = current_tags - all_tags
+ if missing_tags:
+ log_error("Missing Tag %s(s) were found:\nMissing: %s\n\nCurrent: %s\n\nAll Tags: %s\n\nTag Data: %s" % (
+ 'Guids' if from_guids else 'Names', ', '.join(sorted(missing_tags)), ', '.join(sorted(current_tags)),
+ ', '.join(sorted(all_tags)), str(self.tag_data)))
+ return missing_tags
+
+ def get_matching_tag_data(self, tag_guids=None, tag_names=None):
+ tagGuids = []
+ tagNames = []
+ assert tag_guids or tag_names
+ from_guids = True if (tag_guids is not None) else False
+ tags_original = tag_guids if from_guids else tag_names
+ if self.get_missing_tags(tags_original, from_guids):
+ self.update_tags_database("Missing Tag %s(s) Were found when attempting to get matching tag data" % (
+ 'Guids' if from_guids else 'Names'))
+ missing_tags = self.get_missing_tags(tags_original, from_guids)
+ if missing_tags:
+ identifier = 'Guid' if from_guids else 'Name'
+ keys = ', '.join(sorted(missing_tags))
+ log_error("FATAL ERROR: Tag %s(s) %s were not found on the Evernote Servers" % (identifier, keys))
+ raise EDAMNotFoundException(identifier.lower(), keys)
+ if from_guids:
+ tags_dict = {x: self.tag_data[x] for x in tags_original}
+ else:
+ tags_dict = {[k for k, v in self.tag_data.items() if v.Name is tag_name][0]: tag_name for tag_name in
+ tags_original}
+ tagNamesToImport = get_tag_names_to_import(tags_dict)
+ """:type : dict[string, EvernoteTag]"""
+ if tagNamesToImport:
+ is_struct = None
+ for k, v in tagNamesToImport.items():
+ if is_struct is None:
+ is_struct = isinstance(v, EvernoteTag)
+ tagGuids.append(k)
+ tagNames.append(v.Name if is_struct else v)
+ tagNames = sorted(tagNames, key=lambda s: s.lower())
+ return tagGuids, tagNames
+
+ def check_tags_up_to_date(self):
+ for evernote_guid in self.evernote_guids:
+ if evernote_guid not in self.metadata:
+ log_error('Could not find note metadata for Note ''%s''' % evernote_guid)
+ return False
+ note_metadata = self.metadata[evernote_guid]
+ if not note_metadata.tagGuids:
+ continue
+ for tag_guid in note_metadata.tagGuids:
+ if tag_guid in self.tag_data:
+ continue
+ tag = EvernoteTag(fetch_guid=tag_guid)
+ if not tag.success:
+ return False
+ self.tag_data[tag_guid] = tag
+ return True
diff --git a/anknotes/args.py b/anknotes/args.py
new file mode 100644
index 0000000..0d033a0
--- /dev/null
+++ b/anknotes/args.py
@@ -0,0 +1,204 @@
+# -*- coding: utf-8 -*-
+import re
+from datetime import datetime
+
+### Anknotes Imports
+from anknotes.constants import *
+from anknotes.base import item_to_list, caller_name, is_str, is_seq_type, is_dict_type
+from anknotes.dicts import DictCaseInsensitive
+from anknotes.dicts_base import DictKey
+
+class Args(object):
+
+ def __init__(self, func_kwargs=None, func_args=None, func_args_list=None, set_list=None, set_dict=None,
+ require_all_args=None, limit_max_args=None, override_kwargs=None, use_set_list_as_arg_list=False):
+ from logging import write_file_contents, pf
+ self.__require_all_args = require_all_args or False
+ self.__limit_max_args = limit_max_args if limit_max_args is not None else True
+ self.__override_kwargs = override_kwargs or False
+ self.__set_args_and_kwargs(func_args, func_kwargs)
+ self.__func_args_list = use_set_list_as_arg_list and [set_list[i*2] for i in range(0, len(set_list)/2)] or func_args_list
+ if self.__func_args_list:
+ self.process_args()
+ self.__init_set(set_list, set_dict)
+
+ def __init_set(self, set_list=None, set_dict=None):
+ if not set_list or set_dict:
+ return
+ self.__conv_set_list(set_list)
+ self.set_kwargs(set_list, set_dict)
+
+ def __conv_set_list(self, set_list=None):
+ if set_list is None:
+ return
+ func_args_count = len(self.__func_args_list)
+ for i in range(0, len(set_list)/2):
+ if not set_list[i*2] and func_args_count > i:
+ set_list[i*2] = self.__func_args_list[i]
+
+ def __set_args_and_kwargs(self, *a, **kw)
+ self.__func_args, self.__func_kwargs = self.__get_args_and_kwargs(*a, **kw)
+
+ def __get_args_and_kwargs(self, func_args=None, func_kwargs=None, name=None, allow_cls_override=True):
+ if not func_args and not func_kwargs:
+ return self.args or [], self.kwargs or DictCaseInsensitive()
+ func_args = func_args or allow_cls_override and self.args or []
+ func_kwargs = func_kwargs or allow_cls_override and self.kwargs or DictCaseInsensitive()
+ if is_seq_type(func_kwargs) and is_dict_type(func_args):
+ func_args, func_kwargs = func_kwargs, func_args
+ func_args = self.__args_to_list(func_args)
+ if isinstance(func_kwargs, dict):
+ func_kwargs = DictCaseInsensitive(func_kwargs, key=name, parent_key='kwargs')
+ if not isinstance(func_args, list):
+ func_args = []
+ if not isinstance(func_kwargs, DictCaseInsensitive):
+ func_kwargs = DictCaseInsensitive(key=name)
+ return func_args, func_kwargs
+
+ def __args_to_list(self, func_args):
+ if not is_str(func_args):
+ return list(func_args)
+ lst = []
+ for arg in item_to_list(func_args, chrs=','):
+ lst += [arg] + [None]
+ return lst
+
+ @property
+ def kwargs(self):
+ return self.__func_kwargs
+
+ @property
+ def args(self):
+ return self.__func_args
+
+ @property
+ def keys(self):
+ return self.kwargs.keys()
+
+ def key_transform(self, key, keys=None):
+ if keys is None:
+ keys = self.keys
+ key = key.strip()
+ key_lower = key.lower()
+ for k in keys:
+ if k.lower() == key_lower:
+ return k
+ return key
+
+ def get_kwarg(self, key, **kwargs):
+ kwargs['update_kwargs'] = False
+ return self.process_kwarg(key, **kwargs)
+
+ def process_kwarg(self, key, default=None, func_kwargs=None, replace_none_type=True, delete_from_kwargs=None, return_value_only=True, update_cls_args=True):
+ delete_from_kwargs = delete_from_kwargs is not False
+ cls_kwargs = func_kwargs is None
+ func_kwargs = self.kwargs if cls_kwargs else DictCaseInsensitive(func_kwargs)
+ key = self.key_transform(key, func_kwargs.keys())
+ if key not in func_kwargs:
+ return (func_kwargs, default) if delete_from_kwargs and not return_value_only else default
+ val = func_kwargs[key]
+ if val is None and replace_none_type:
+ val = default
+ if delete_from_kwargs:
+ del func_kwargs[key]
+ if cls_kwargs and update_cls_args:
+ del self.__func_kwargs[key]
+ if not delete_from_kwargs or return_value_only:
+ return val
+ return func_kwargs, val
+
+ def process_kwargs(self, get_args=None, set_dict=None, func_kwargs=None, delete_from_kwargs=True, update_cls_args=True, **kwargs):
+ method_name='process_kwargs'
+ kwargs['return_value_only'] = False
+ cls_kwargs = func_kwargs is None
+ func_kwargs = self.kwargs if cls_kwargs else DictCaseInsensitive(func_kwargs)
+ keys = func_kwargs.keys()
+ for key, value in set_dict.items() if set_dict else []:
+ key = self.key_transform(key, keys)
+ if key not in func_kwargs:
+ func_kwargs[key] = value
+ if not get_args:
+ if cls_kwargs and update_cls_args:
+ self.__func_kwargs = func_kwargs
+ return func_kwargs
+ gets = []
+ for arg in get_args:
+ # for arg in args:
+ if len(arg) is 1 and isinstance(arg[0], list):
+ arg = arg[0]
+ result = self.process_kwarg(arg[0], arg[1], func_kwargs=func_kwargs, delete_from_kwargs=delete_from_kwargs, **kwargs)
+ if delete_from_kwargs:
+ func_kwargs = result[0]
+ result = result[1]
+ gets.append(result)
+ if cls_kwargs and update_cls_args:
+ self.__func_kwargs = func_kwargs
+ if delete_from_kwargs:
+ return [func_kwargs] + gets
+ return gets
+
+ def get_kwarg_values(self, *args, **kwargs):
+ kwargs['return_value_only'] = True
+ if not 'delete_from_kwargs' in kwargs:
+ kwargs['delete_from_kwargs'] = False
+ return self.get_kwargs(*args, **kwargs)
+
+ def get_kwargs(self, *args_list, **kwargs):
+ method_name='get_kwargs'
+ lst = []
+ for args in args_list:
+ if isinstance(args, dict):
+ args = item_to_list(args)
+ args_dict = args
+ if isinstance(args, list):
+ lst += [args[i * 2:i * 2 + 2] for i in range(0, len(args) / 2)]
+ else:
+ lst += [[arg, None] for arg in item_to_list(args)]
+ return self.process_kwargs(get_args=lst, **kwargs)
+
+ def process_args(self, arg_list=None, func_args=None, func_kwargs=None, update_cls_args=True):
+ method_name='process_args'
+ arg_list = item_to_list(arg_list) if arg_list else self.__func_args_list
+ cls_args = func_args is None
+ cls_kwargs = func_kwargs is None
+ func_args, func_kwargs = self.__get_args_and_kwargs(func_args, func_kwargs)
+ arg_error = ''
+ if not func_args:
+ return func_args, func_kwargs
+ for i in range(0, len(arg_list)):
+ if len(func_args) is 0:
+ break
+ arg = arg_list[i]
+ if arg in func_kwargs and not self.__override_kwargs:
+ formats = (caller_name(return_string=True), arg)
+ raise TypeError("Anknotes.Args: %s() got multiple arguments for keyword argument '%s'" % formats)
+ func_kwargs[arg] = func_args[0]
+ del func_args[0]
+ else:
+ if self.__require_all_args:
+ arg_error = 'least'
+ if func_args and self.__limit_max_args:
+ arg_error = 'most'
+ if arg_error:
+ formats = (caller_name(return_string=True), arg_error, len(arg_list), '' if arg_list is 1 else 's', len(func_args))
+ raise TypeError('Anknotes.Args: %s() takes at %s %d argument%s (%d given)' % formats)
+ if cls_args and update_cls_args:
+ self.__func_args = func_args
+ if cls_kwargs and update_cls_args:
+ self.__func_kwargs = func_kwargs
+ return func_args, func_kwargs
+
+ def set_kwargs(self, set_list=None, set_dict=None, func_kwargs=None, name=None, delete_from_kwargs=None, *args, **kwargs):
+ method_name='set_kwargs'
+ new_args = []
+ lst, dct = self.__get_args_and_kwargs(set_list, set_dict, allow_cls_override=False)
+ if isinstance(lst, list):
+ dct.update({lst[i * 2]: lst[i * 2 + 1] for i in range(0, len(lst) / 2)})
+ lst = []
+ for arg in args:
+ new_args += item_to_list(arg, False)
+ dct.update({key: None for key in item_to_list(lst, chrs=',') + new_args})
+ dct.update(kwargs)
+ dct_items = dct.items()
+ processed_kwargs = self.process_kwargs(func_kwargs=func_kwargs, set_dict=dct, name=name, delete_from_kwargs=delete_from_kwargs).items()
+ return self.process_kwargs(func_kwargs=func_kwargs, set_dict=dct, name=name, delete_from_kwargs=delete_from_kwargs)
\ No newline at end of file
diff --git a/anknotes/base.py b/anknotes/base.py
new file mode 100644
index 0000000..a574b46
--- /dev/null
+++ b/anknotes/base.py
@@ -0,0 +1,362 @@
+# -*- coding: utf-8 -*-
+import re
+from fnmatch import fnmatch
+import inspect
+from collections import defaultdict, Iterable
+from bs4 import UnicodeDammit
+import string
+from datetime import datetime
+
+
+### Anknotes Imports
+from anknotes.imports import in_anki
+
+### Anki Imports
+if in_anki():
+ from aqt import mw
+
+class SafeDict(defaultdict):
+ def __init__(self, *a, **kw):
+ for i, arg in enumerate(a):
+ if arg is None:
+ raise TypeError("SafeDict arg %d is NoneType" % (i + 1))
+ dct = dict(*a, **kw)
+ super(self.__class__, self).__init__(self.__missing__, dct)
+
+ def __getitem__(self, key):
+ item = super(self.__class__, self).__getitem__(key)
+ if isinstance(item, dict):
+ item = SafeDict(item)
+ return item
+
+ def __missing__(self, key):
+ return '{' + key + '}'
+
+def decode(str_, is_html=False, errors='strict'):
+    """Return *str_* as unicode (Python 2).
+
+    unicode passes through; byte strings are decoded via UnicodeDammit
+    (UTF-8 preferred, charset-sniffed when is_html); anything else goes
+    through unicode() with the given error policy.
+    """
+    if isinstance(str_, unicode):
+        return str_
+    if isinstance(str_, str):
+        return UnicodeDammit(str_, ['utf-8'], is_html=is_html).unicode_markup
+    return unicode(str_, 'utf-8', errors)
+
+def decode_html(str_):
+ return decode(str_, True)
+
+def encode(str_):
+ if isinstance(str_, unicode):
+ return str_.encode('utf-8')
+ return str_
+
+def is_str(str_):
+ return str_ and is_str_type(str_)
+
+def is_str_type(str_):
+    """True when *str_* is a string of either Python 2 flavor (str or unicode)."""
+    return isinstance(str_, (str, unicode))
+
+def is_seq_type(*a):
+ for item in a:
+ if not isinstance(item, Iterable) or not hasattr(item, '__iter__'):
+ return False
+ return True
+
+def is_dict_type(*a):
+ for item in a:
+ if not isinstance(item, dict) or hasattr(item, '__dict__'):
+ return False
+ return True
+
+def get_unique_strings(*a):
+    """De-duplicate the given strings, preserving positions.
+
+    With a single dict argument, returns a copy of it with duplicate or falsy
+    values blanked to ''; otherwise returns a list aligned with the inputs.
+    A [obj, attr] entry is first resolved via getattr(obj, attr).
+    """
+    lst=[]
+    items=[]
+    if a and isinstance(a[0], dict):
+        lst = a[0].copy()
+        a = a[0].items()
+    else:
+        a = enumerate(a)
+    for key, str_ in sorted(a):
+        if isinstance(str_, list):
+            # [obj, attribute-name] pair: resolve to the attribute's value
+            str_, attr = str_
+            str_ = getattr(str_, attr, None)
+        # NOTE(review): when lst is a dict, 'str_ in lst' tests KEYS, not
+        # values -- confirm that is the intended duplicate check.
+        if not str_ or str_ in lst or str_ in items:
+            if isinstance(lst, list):
+                lst.append('')
+            else:
+                lst[key] = ''
+            continue
+        items.append(str_)
+        str_ = str(str_)
+        if isinstance(lst, list):
+            lst.append(str_)
+        else:
+            lst[key] = str_
+    return lst
+
+def call(func, *a, **kw):
+    """Call *func* with only the arguments its (Python 2) argspec accepts.
+
+    Non-callables are returned unchanged. Positional args beyond the declared
+    parameters are dropped unless the function takes *args; unknown keywords
+    are dropped unless it takes **kwargs.
+    """
+    if not callable(func):
+        return func
+    spec=inspect.getargspec(func)
+    if not spec.varargs:
+        a = a[:len(spec.args)]
+    if not spec.keywords:
+        kw = {key:value for key, value in kw.items() if key in spec.args}
+    return func(*a, **kw)
+
+def fmt(str_, recursion=None, *a, **kw):
+    """Format *str_* via SafeDict so unknown '{placeholders}' survive,
+    applying the formatting pass ``recursion`` + 1 times so nested
+    placeholders are resolved.
+
+    :type str_: str | unicode
+    :type recursion : int | dict | list
+    :rtype: str | unicode
+    """
+    if not isinstance(recursion, int):
+        # Second positional was actually a mapping/sequence of format args
+        if recursion is not None:
+            a = [recursion] + list(a)
+        recursion = 1
+    dct = SafeDict(*a, **kw)
+    str_ = string.Formatter().vformat(str_, [], dct)
+    if recursion <= 0:
+        return str_
+    return fmt(str_, recursion-1, *a, **kw)
+
+def pad_digits(*a, **kw):
+ conv = []
+ for str_ in a:
+ if isinstance(str_, int):
+ str_ = str(str_)
+ if not is_str_type(str_):
+ conv.append('')
+ else:
+ conv.append(str_.rjust(3) if str_.isdigit() else str_)
+ if len(conv) is 1:
+ return conv[0]
+ return conv
+
+def str_safe(str_, prefix=''):
+ repr_ = str_.__repr__()
+ try:
+ str_ = str(prefix + repr_)
+ except Exception:
+ str_ = str(prefix + encode(repr_, errors='replace'))
+ return str_
+
+def str_split_case(str_, ignore_underscore=False):
+ words=[]
+ word=''
+ for chr in str_:
+ last_chr = word[-1:]
+ if chr.isupper() and (last_chr.islower() or (ignore_underscore and last_chr is '_')):
+ words.append(word)
+ word = ''
+ word += chr
+ return words + [word]
+
+def str_capitalize(str_, phrase_delimiter='.', word_delimiter='_'):
+    """Capitalize every word of every phrase in *str_*.
+
+    NOTE(review): both joins use '', so the original delimiters are dropped
+    from the result ('a.b_c' -> 'ABC') -- confirm this is intended.
+    """
+    phrases = str_.split(phrase_delimiter)
+    return ''.join(''.join([word.capitalize() for word in phrase.split(word_delimiter)]) for phrase in phrases)
+
+def in_delimited_str(key, str_, chr='|', case_insensitive=True):
+ if case_insensitive:
+ key = key.lower()
+ str_ = str_.lower()
+ return key in str_.strip(chr).split(chr)
+
+def print_safe(str_, prefix=''):
+    """Print repr(str_) with *prefix*, never raising on encoding errors."""
+    print str_safe(str_, prefix)
+
+def item_to_list(item, list_from_unknown=True, chrs='', split_chr='|'):
+    """Coerce *item* into a list.
+
+    Sequences become lists; dicts flatten to [k1, v1, k2, v2, ...]; strings
+    are split on split_chr after each char of *chrs* is replaced by it; None
+    becomes []. Anything else is wrapped in a one-element list when
+    list_from_unknown is set, else returned unchanged.
+    """
+    if is_seq_type(item):
+        return list(item)
+    if isinstance(item, dict):
+        # Flatten to alternating key/value entries
+        return [y for x in item.items() for y in x]
+    if is_str(item):
+        for c in chrs:
+            item = item.replace(c, split_chr or ' ')
+        return item.split(split_chr)
+    if item is None:
+        return []
+    if list_from_unknown:
+        return [item]
+    return item
+
+def item_to_set(item, **kwargs):
+ if isinstance(item, set):
+ return item
+ item = item_to_list(item, **kwargs)
+ if not isinstance(item, list):
+ return item
+ return set(item)
+
+def matches_list(item, lst):
+ item = item.lower()
+ for index, value in enumerate(item_to_list(lst)):
+ value = value.lower()
+ if fnmatch(item, value) or fnmatch(item + 's', value):
+ return index + 1
+ return 0
+
+def get_default_value(cls, default=None):
+ if default is not None:
+ return default
+ if cls is str or cls is unicode:
+ return ''
+ elif cls is int:
+ return 0
+ elif cls is bool:
+ return False
+ return None
+
+def key_transform(mapping, key, all=False):
+    """Return the member of *mapping* (its iterated keys, or dir(mapping) for
+    non-iterables or when *all* is set) matching *key* case-insensitively,
+    falling back to *key* itself when nothing matches."""
+    key_lower = key.lower()
+    match = [k for k in (mapping if isinstance(mapping, Iterable) and not all else dir(mapping)) if k.lower() == key_lower]
+    return match and match[0] or key
+
+def delete_keys(mapping, keys_to_delete):
+ if not isinstance(keys_to_delete, list):
+ keys_to_delete = item_to_list(keys_to_delete, chrs=' *,')
+ for key in keys_to_delete:
+ key = key_transform(mapping, key)
+ if key in mapping:
+ del mapping[key]
+
+def ank_prop(self, keys, fget=None, fset=None, fdel=None, doc=None):
+    """Attach (or extend) a property named by each entry of *keys* on the
+    class of *self*.
+
+    fget/fset/fdel may be callables, attribute-name strings (resolved on the
+    class), or omitted to reuse the accessor from an existing same-named
+    property.
+    """
+    for key in list(keys):
+        all_args=locals()
+        args = {}
+        try:
+            property_ = getattr(self.__class__, key)
+        except AttributeError:
+            property_ = property()
+
+        for v in ['fget', 'fset', 'fdel']:
+            args[v] = all_args[v]
+            if not args[v]:
+                # Fall back to the accessor already on the existing property
+                args[v] = getattr(property_, v)
+            if is_str(args[v]):
+                # Accessor given by name: resolve it on the class
+                args[v] = getattr(self.__class__, args[v])
+            if isinstance(args[v], property):
+                args[v] = getattr(args[v], v)
+        if not doc:
+            doc = property_.__doc__
+        if not doc:
+            doc = fget.__doc__
+        args['doc'] = doc
+        property_ = property(**args)
+        setattr(self.__class__, key, property_)
+
+def get_friendly_interval_string(lastImport):
+    """Render the time since *lastImport* (an ANKNOTES.DATE_FORMAT string) as
+    a short human-friendly interval like '3 days', '1:05 hr' or '4:30 min'."""
+    if not lastImport:
+        return ""
+    from anknotes.constants import ANKNOTES
+    td = (datetime.now() - datetime.strptime(lastImport, ANKNOTES.DATE_FORMAT))
+    days = td.days
+    # NOTE(review): hours derives from total_seconds(), so for days == 1 it
+    # includes that day's 24 hours (e.g. 'One Day, 25 Hours') -- confirm intended.
+    hours, remainder = divmod(td.total_seconds(), 3600)
+    minutes, seconds = divmod(remainder, 60)
+    if days > 1:
+        lastImportStr = "%d days" % td.days
+    else:
+        hours = round(hours)
+        hours_str = '' if hours == 0 else ('1:%02d hr' % minutes) if hours == 1 else '%d Hours' % hours
+        if days == 1:
+            lastImportStr = "One Day%s" % ('' if hours == 0 else ', ' + hours_str)
+        elif hours > 0:
+            lastImportStr = hours_str
+        else:
+            lastImportStr = "%d:%02d min" % (minutes, seconds)
+    return lastImportStr
+
+
+def clean_evernote_css(str_):
+    """Strip Evernote's boilerplate inline-style attributes from HTML,
+    removing any style="" attributes left empty afterwards."""
+    remove_style_attrs = '-webkit-text-size-adjust: auto|-webkit-text-stroke-width: 0px|background-color: rgb(255, 255, 255)|color: rgb(0, 0, 0)|font-family: Tahoma|font-size: medium;|font-style: normal|font-variant: normal|font-weight: normal|letter-spacing: normal|orphans: 2|text-align: -webkit-auto|text-indent: 0px|text-transform: none|white-space: normal|widows: 2|word-spacing: 0px|word-wrap: break-word|-webkit-nbsp-mode: space|-webkit-line-break: after-white-space'.replace(
+        '(', '\\(').replace(')', '\\)')
+    # 'margin: 0px; padding: 0px 0px 0px 40px; '
+    return re.sub(r' ?(%s);? ?' % remove_style_attrs, '', str_).replace(' style=""', '')
+
+
+def caller_names(return_string=True, simplify=True):
+    """Walk up to 20 stack frames and return every non-empty caller
+    identifier (dotted strings when return_string, else CallerInfo objects)."""
+    return [c.Base if return_string else c for c in [__caller_name(i, simplify) for i in range(0, 20)] if
+            c and c.Base]
+
+
+class CallerInfo:
+ Class = []
+ Module = []
+ Outer = []
+ Name = ""
+ simplify = True
+ __keywords_exclude = ['pydevd', 'logging', 'base', '__caller_name', 'stopwatch', 'process_args']
+ __keywords_strip = ['__maxin__', 'anknotes', '']
+ __outer = []
+ filtered = True
+
+ @property
+ def __trace(self):
+ return self.Module + self.Outer + self.Class + [self.Name]
+
+ @property
+ def Trace(self):
+ t = self.__strip(self.__trace)
+ return t if not self.filtered or not [e for e in self.__keywords_exclude if e in t] else []
+
+ @property
+ def Base(self):
+ return '.'.join(self.__strip(self.Module + self.Class + [self.Name])) if self.Trace else ''
+
+ @property
+ def Full(self):
+ return '.'.join(self.Trace)
+
+ def __strip(self, lst):
+ return [t for t in lst if t and t not in self.__keywords_strip]
+
+ def __init__(self, parentframe=None):
+ """
+
+ :rtype : CallerInfo
+ """
+ if not parentframe:
+ return
+ self.Class = parentframe.f_locals['self'].__class__.__name__.split(
+ '.') if 'self' in parentframe.f_locals else []
+ module = inspect.getmodule(parentframe)
+ self.Module = module.__name__.split('.') if module else []
+ self.Name = parentframe.f_code.co_name if parentframe.f_code.co_name is not '' else ''
+ self.__outer = [[f[1], f[3]] for f in inspect.getouterframes(parentframe) if f]
+ self.__outer.reverse()
+ self.Outer = [f[1] for f in self.__outer if
+ f and f[1] and not [exclude for exclude in self.__keywords_exclude + [self.Name] if
+ exclude in f[0] or exclude in f[1]]]
+ del parentframe
+
+
+def create_log_filename(str_):
+    """Convert a dotted caller path into a log-file path: dots become path
+    separators, immediately repeated components collapse, and leading or
+    trailing separators are trimmed."""
+    if str_ is None:
+        return ""
+    str_ = str_.replace('.', '\\')
+    # Collapse 'X\X' runs produced by module/class name repetition
+    str_ = re.sub(r"(^|\\)([^\\]+)\\\2(\b.|\\.|$)", r"\1\2\\", str_)
+    str_ = re.sub(r"^\\*(.+?)\\*$", r"\1", str_)
+    return str_
+
+
+# @clockit
+def caller_name(skip=None, simplify=True, return_string=False, return_filename=False):
+    """Return the first meaningful caller, as a CallerInfo, dotted string, or
+    log filename depending on the flags; empty/None when none is found."""
+    if skip is None:
+        # No explicit depth: scan up to 20 frames for the first usable caller
+        names = [__caller_name(i, simplify) for i in range(0, 20)]
+    else:
+        names = [__caller_name(skip, simplify=simplify)]
+    for c in [c for c in names if c and c.Base]:
+        return create_log_filename(c.Base) if return_filename else c.Base if return_string else c
+    return "" if return_filename or return_string else None
+
+
+def __caller_name(skip=0, simplify=True):
+    """Build a CallerInfo for the stack frame *skip* levels up, or None when
+    the stack is not that deep.
+
+    :rtype : CallerInfo
+    """
+    stack = inspect.stack()
+    start = 0 + skip
+    if len(stack) < start + 1:
+        return None
+    parentframe = stack[start][0]
+    c_info = CallerInfo(parentframe)
+    # Drop the frame reference promptly to avoid reference cycles
+    del parentframe
+    return c_info
diff --git a/anknotes/constants.py b/anknotes/constants.py
new file mode 100644
index 0000000..969bfb5
--- /dev/null
+++ b/anknotes/constants.py
@@ -0,0 +1,4 @@
+# -*- coding: utf-8 -*-
+
+from anknotes.constants_standard import *
+from anknotes.constants_settings import *
diff --git a/anknotes/constants_default.py b/anknotes/constants_default.py
new file mode 100644
index 0000000..289cc6f
--- /dev/null
+++ b/anknotes/constants_default.py
@@ -0,0 +1,210 @@
+import os
+from datetime import timedelta, datetime
+
+PATH = os.path.dirname(os.path.abspath(__file__))
+
+class FOLDERS():
+    # Filesystem layout used throughout anknotes, rooted at this package's dir
+    ADDONS = os.path.dirname(PATH)
+    EXTRA = os.path.join(PATH, 'extra')
+    ANCILLARY = os.path.join(EXTRA, 'ancillary')
+    GRAPHICS = os.path.join(EXTRA, 'graphics')
+    LOGS = os.path.join(EXTRA, 'logs')
+    DEVELOPER = os.path.join(EXTRA, 'dev')
+    USER = os.path.join(EXTRA, 'user')
+
+
+class FILES():
+    # Static file locations, grouped by purpose into nested namespace classes
+    class LOGS():
+        class FDN():
+            # 'Find Deleted Notes' log names. ANKI_ORPHANS starts out as the
+            # shared subfolder prefix and is finalized into a filename below,
+            # after the derived names have captured the prefix.
+            ANKI_ORPHANS = 'Find Deleted Notes\\'
+            UNIMPORTED_EVERNOTE_NOTES = ANKI_ORPHANS + 'UnimportedEvernoteNotes'
+            ANKI_TITLE_MISMATCHES = ANKI_ORPHANS + 'AnkiTitleMismatches'
+            ANKNOTES_TITLE_MISMATCHES = ANKI_ORPHANS + 'AnknotesTitleMismatches'
+            ANKNOTES_ORPHANS = ANKI_ORPHANS + 'AnknotesOrphans'
+            ANKI_ORPHANS += 'AnkiOrphans'
+
+        BASE_NAME = ''
+        DEFAULT_NAME = 'anknotes'
+        MAIN = DEFAULT_NAME
+        ACTIVE = DEFAULT_NAME
+        USE_CALLER_NAME = False
+        # Wildcard lists controlling which per-caller log files are written
+        ENABLED = ['*']
+        DISABLED = ['finder*', 'args*', 'counter*', 'Dicts*']
+        SEE_ALSO_DISABLED = [4,6]
+
+    class ANCILLARY():
+        TEMPLATE = os.path.join(FOLDERS.ANCILLARY, 'FrontTemplate.htm')
+        CSS = u'_AviAnkiCSS.css'
+        CSS_QMESSAGEBOX = os.path.join(FOLDERS.ANCILLARY, 'QMessageBox.css')
+        ENML_DTD = os.path.join(FOLDERS.ANCILLARY, 'enml2.dtd')
+
+    class SCRIPTS():
+        VALIDATION = os.path.join(FOLDERS.ADDONS, 'anknotes_start_note_validation.py')
+        FIND_DELETED_NOTES = os.path.join(FOLDERS.ADDONS, 'anknotes_start_find_deleted_notes.py')
+
+    class GRAPHICS():
+        class ICON():
+            EVERNOTE_WEB = os.path.join(FOLDERS.GRAPHICS, u'evernote_web.ico')
+            EVERNOTE_ARTCORE = os.path.join(FOLDERS.GRAPHICS, u'evernote_artcore.ico')
+            TOMATO = os.path.join(FOLDERS.GRAPHICS, u'Tomato-icon.ico')
+
+        class IMAGE():
+            EVERNOTE_WEB = None
+            EVERNOTE_ARTCORE = None
+
+        # PNG variants derived from the icon paths after both classes exist
+        IMAGE.EVERNOTE_WEB = ICON.EVERNOTE_WEB.replace('.ico', '.png')
+        IMAGE.EVERNOTE_ARTCORE = ICON.EVERNOTE_ARTCORE.replace('.ico', '.png')
+
+    class USER():
+        TABLE_OF_CONTENTS_ENEX = os.path.join(FOLDERS.USER, "Table of Contents.enex")
+        LAST_PROFILE_LOCATION = os.path.join(FOLDERS.USER, 'anki.profile')
+
+
+class ANKNOTES():
+    # Global add-on behavior switches and text-layout metrics
+    DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
+    CACHE_SEARCHES = False
+    UPDATE_DB_ON_START = False
+
+    class HOOKS():
+        # Anki integration points that anknotes wraps
+        DB = True
+        SEARCH = True
+
+    class LXML():
+        ENABLE_IN_ANKI = False
+
+    class DEVELOPER_MODE:
+        # Enabled by dropping marker files into the dev folder
+        ENABLED = (os.path.isfile(os.path.join(FOLDERS.DEVELOPER, 'anknotes.developer')))
+        AUTOMATED = ENABLED and (os.path.isfile(os.path.join(FOLDERS.DEVELOPER, 'anknotes.developer.automate')))
+        AUTO_RELOAD_MODULES = True
+
+    class HIERARCHY():
+        ROOT_TITLES_BASE_QUERY = ""
+
+    class FORMATTING():
+        # Column widths / paddings for the plain-text log layout
+        BANNER_MINIMUM = 80
+        COUNTER_BANNER_MINIMUM = 40
+        LINE_PADDING_HEADER = 31
+        LINE_LENGTH_TOTAL = 191
+        LINE_LENGTH = LINE_LENGTH_TOTAL - 2
+        LIST_PAD = 25
+        PROGRESS_SUMMARY_PAD = 31
+        PPRINT_WIDTH = 80
+        TIMESTAMP_PAD = '\t' * 6
+        TIMESTAMP_PAD_LENGTH = len(TIMESTAMP_PAD.replace('\t', ' ' * 4))
+        TEXT_LENGTH = LINE_LENGTH_TOTAL - TIMESTAMP_PAD_LENGTH
+
+
+class MODELS():
+    # Names of the Anki note models (note types) created by anknotes
+    class TYPES():
+        CLOZE = 1
+
+    class OPTIONS():
+        IMPORT_STYLES = True
+
+    DEFAULT = 'evernote_note'
+    REVERSIBLE = 'evernote_note_reversible'
+    REVERSE_ONLY = 'evernote_note_reverse_only'
+    CLOZE = 'evernote_note_cloze'
+
+
+class TEMPLATES():
+    # Card template names used inside the anknotes note models
+    DEFAULT = 'EvernoteReview'
+    REVERSED = 'EvernoteReviewReversed'
+    CLOZE = 'EvernoteReviewCloze'
+
+
+class FIELDS():
+    # Field names of the anknotes note models; LIST fixes their model order
+    TITLE = 'Title'
+    CONTENT = 'Content'
+    SEE_ALSO = 'See_Also'
+    TOC = 'TOC'
+    OUTLINE = 'Outline'
+    EXTRA = 'Extra'
+    EVERNOTE_GUID = 'Evernote GUID'
+    UPDATE_SEQUENCE_NUM = 'updateSequenceNum'
+    EVERNOTE_GUID_PREFIX = 'evernote_guid='
+    LIST = [TITLE, CONTENT, SEE_ALSO, EXTRA, TOC, OUTLINE,
+            UPDATE_SEQUENCE_NUM]
+
+    class ORD():
+        # Field ordinals: GUID is 0, the rest offset by 1 from LIST
+        EVERNOTE_GUID = 0
+
+    ORD.CONTENT = LIST.index(CONTENT) + 1
+    ORD.SEE_ALSO = LIST.index(SEE_ALSO) + 1
+
+
+class DECKS():
+    # Default Anki deck names/suffixes for imported Evernote notes
+    DEFAULT = "Evernote"
+    TOC_SUFFIX = "::See Also::TOC"
+    OUTLINE_SUFFIX = "::See Also::Outline"
+
+
+class ANKI():
+    # Anki-side behavior settings
+    PROFILE_NAME = ''
+    NOTE_LIGHT_PROCESSING_INCLUDE_CSS_FORMATTING = False
+
+
+class TAGS():
+    # Special Evernote tag names that alter how a note is imported
+    TOC = '#TOC'
+    TOC_AUTO = '#TOC.Auto'
+    OUTLINE = '#Outline'
+    OUTLINE_TESTABLE = '#Outline.Testable'
+    REVERSIBLE = '#Reversible'
+    REVERSE_ONLY = '#Reversible_Only'
+
+
+class EVERNOTE():
+    # Evernote service interaction limits, pacing intervals and credentials
+    class IMPORT():
+        class PAGING():
+            # Note that Evernote's API documentation says not to run API calls to findNoteMetadata with any less than a 15 minute interval
+            # Auto Paging is probably only useful in the first 24 hours, when API usage is unlimited, or when executing a search that is likely to have most of the notes up-to-date locally
+            # To keep from overloading Evernote's servers, and flagging our API key, I recommend pausing 5-15 minutes in between searches, the higher the better.
+            class RESTART():
+                INTERVAL = None
+                DELAY_MINIMUM_API_CALLS = 10
+                INTERVAL_OVERRIDE = 60 * 5
+                ENABLED = False
+
+            INTERVAL = 60 * 15
+            INTERVAL_SANDBOX = 60 * 5
+            # Finalize the restart interval now that INTERVAL exists
+            RESTART.INTERVAL = INTERVAL * 2
+
+        INTERVAL = PAGING.INTERVAL * 4 / 3
+        METADATA_RESULTS_LIMIT = 10000
+        QUERY_LIMIT = 250 # Max returned by API is 250
+        API_CALLS_LIMIT = 300
+
+    class UPLOAD():
+        ENABLED = True # Set False if debugging note creation
+        MAX = -1 # Set to -1 for unlimited
+        RESTART_INTERVAL = 30 # In seconds
+
+    class VALIDATION():
+        ENABLED = True
+        AUTOMATED = False
+
+    class API():
+        class RateLimitErrorHandling:
+            IgnoreError, ToolTipError, AlertError = range(3)
+
+        CONSUMER_KEY = "holycrepe"
+        IS_SANDBOXED = False
+        EDAM_RATE_LIMIT_ERROR_HANDLING = RateLimitErrorHandling.ToolTipError
+        DEBUG_RAISE_ERRORS = False
+
+
+class TABLES():
+    # SQLite table names in the anknotes ancillary database
+    SEE_ALSO = "anknotes_see_also"
+    NOTE_VALIDATION_QUEUE = "anknotes_note_validation_queue"
+    TOC_AUTO = u'anknotes_toc_auto'
+
+    class EVERNOTE():
+        NOTEBOOKS = "anknotes_evernote_notebooks"
+        TAGS = "anknotes_evernote_tags"
+        NOTES = u'anknotes_evernote_notes'
+        NOTES_HISTORY = u'anknotes_evernote_notes_history'
+
+class HEADINGS():
+    # '|'-delimited heading names used to classify/position note sections
+    TOP = "Summary|Definitions|Classifications|Types|Presentations|Organ Involvement|Age of Onset|Si/Sx|Sx|Signs|Triggers|MCC's|MCCs|Inheritance|Incidence|Prognosis|Derivations|Origins|Embryological Origins|Mechanisms|MOA|Pathophysiology|Indications|Examples|Causes|Causative Organisms|Risk Factors|Complications|Side Effects|Drug S/Es|Associated Conditions|A/w|Diagnosis|Dx|Physical Exam|Labs|Hemodynamic Parameters|Lab Findings|Imaging|Screening Tests|Confirmatory Tests|Xray|CT|MRI"
+    BOTTOM = "Management|Work Up|Tx"
+    NOT_REVERSIBLE = BOTTOM + "|Dx|Diagnosis"
diff --git a/anknotes/constants_settings.py b/anknotes/constants_settings.py
new file mode 100644
index 0000000..9837132
--- /dev/null
+++ b/anknotes/constants_settings.py
@@ -0,0 +1,31 @@
+#Python Imports
+from datetime import datetime, timedelta
+
+
+#Anki Main Imports
+from anknotes.constants_standard import EVERNOTE, DECKS
+
+#Anki Class Imports
+from anknotes.structs_base import UpdateExistingNotes
+from anknotes.dicts import DictSettings
+
+# Mutable runtime settings tree; the nested 'with' blocks open sub-dicts of
+# the DictSettings for grouped assignment.
+SETTINGS = DictSettings(key='anknotes')
+with SETTINGS as s:
+    s.FORM.LABEL_MINIMUM_WIDTH = 100
+    with s.EVERNOTE as e:
+        # Auth token env-style key derives from the consumer key and sandbox flag
+        e.AUTH_TOKEN.setDefault(lambda dct: dct.key.name + '_' + EVERNOTE.API.CONSUMER_KEY.upper() + ("_SANDBOX" if EVERNOTE.API.IS_SANDBOXED else ""))
+        e.AUTO_PAGING = True
+        with e.QUERY as q:
+            # Default Evernote search scope for imports
+            q.TAGS = '#Anki_Import'
+            q.NOTEBOOK = 'My Anki Notebook'
+            with q.LAST_UPDATED.VALUE.ABSOLUTE as a:
+                a.DATE = "{:%Y %m %d}".format(datetime.now() - timedelta(days=7))
+        with e.ACCOUNT as a:
+            a.UID = '0'
+            a.SHARD = 'x999'
+    with s.ANKI as a, a.DECKS as d, a.TAGS as t:
+        a.UPDATE_EXISTING_NOTES = UpdateExistingNotes.UpdateNotesInPlace
+        d.BASE = DECKS.DEFAULT
+        d.EVERNOTE_NOTEBOOK_INTEGRATION = True
+        t.KEEP_TAGS = True
+        t.DELETE_EVERNOTE_QUERY_TAGS = False
diff --git a/anknotes/constants_standard.py b/anknotes/constants_standard.py
new file mode 100644
index 0000000..87004fc
--- /dev/null
+++ b/anknotes/constants_standard.py
@@ -0,0 +1,8 @@
+# -*- coding: utf-8 -*-
+import os
+
+PATH = os.path.dirname(os.path.abspath(__file__))
+# Prefer the user's local overrides when present; else use the shipped defaults
+if os.path.isfile(os.path.join(PATH, 'constants_user.py')):
+    from anknotes.constants_user import *
+else:
+    from anknotes.constants_default import *
\ No newline at end of file
diff --git a/anknotes/constants_user_example.py b/anknotes/constants_user_example.py
new file mode 100644
index 0000000..a0bacd5
--- /dev/null
+++ b/anknotes/constants_user_example.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+# INSTRUCTIONS:
+# USE THIS FILE TO OVERRIDE THE MAIN SETTINGS FILE
+# RENAME FILE TO constants_user.py
+# DON'T FORGET TO REGENERATE ANY VARIABLES THAT DERIVE FROM THE ONES YOU ARE CHANGING
+from anknotes.constants_default import *
+
+# BEGIN OVERRIDES HERE:
+# Example: run against the Evernote sandbox and disable uploads while testing
+EVERNOTE.API.IS_SANDBOXED = True
+EVERNOTE.UPLOAD.VALIDATION.AUTOMATED = False
+EVERNOTE.UPLOAD.ENABLED = False
\ No newline at end of file
diff --git a/anknotes/counters.py b/anknotes/counters.py
new file mode 100644
index 0000000..650c7f5
--- /dev/null
+++ b/anknotes/counters.py
@@ -0,0 +1,94 @@
+import os
+import sys
+from anknotes.constants_standard import ANKNOTES
+from anknotes.base import item_to_list, item_to_set, is_str
+from anknotes.dicts import DictNumeric, DictCaseInsensitive
+from anknotes.dicts_base import DictKey
+
+class Counter(DictNumeric):
+    """Numeric counter dict whose default value reports the sum of its children."""
+    _override_default_ = False
+    _default_ = '_count_'
+    _count_ = 0
+    _my_aggregates_ = 'max|max_allowed'
+    _my_attrs_ = '_count_'
+
+    def __init__(self, *a, **kw):
+        # Cooperative-MRO chaining: an explicit 'mro' depth argument selects
+        # which base __init__ to invoke, incremented as it climbs the hierarchy.
+        # NOTE(review): relies on DictNumeric._get_arg_ semantics -- verify.
+        a, cls, mro = list(a), self.__class__, self._get_arg_(a, int, 'mro', kw)
+        super(cls.mro()[mro], self).__init__(mro+1, *a, **kw)
+        # Expose 'count'/'cnt' as aliases of the default-value property
+        self.prop(['count', 'cnt'], 'default')
+        cls.default_override = cls.sum
+
+    def setCount(self, value):
+        self._count_ = value
+
+    def getCount(self):
+        return self._count_
+
+    def getDefault(self, allow_override=True):
+        # When overriding is enabled, delegate to the class-level override
+        # (set to cls.sum in __init__); otherwise always report the sum.
+        if allow_override and self._override_default_:
+            return self.default_override
+        return self.sum
+
+
+class EvernoteCounter(Counter):
+    """Counter specialized for Evernote sync statistics, with aggregate
+    properties (success/queued/handled/...) and formatted summaries."""
+    _mro_offset_ = 1
+    _default_override_ = True
+
+    def __init__(self, *a, **kw):
+        # Same cooperative-MRO chaining as Counter.__init__
+        a, cls, mro = list(a), self.__class__, self._get_arg_(a, int, 'mro', kw)
+        super(cls.mro()[mro], self).__init__(mro+1, *a, **kw)
+
+    @property
+    def success(self):
+        # Notes fully processed: newly created plus updated
+        return self.created + self.updated
+
+    @property
+    def queued(self):
+        return self.created.queued + self.updated.queued
+
+    @property
+    def completed(self):
+        return self.created.completed + self.updated.completed
+
+    @property
+    def delayed(self):
+        return self.skipped + self.queued
+
+    @property
+    def handled(self):
+        return self.total - self.unhandled - self.error
+
+    @property
+    def total(self):
+        return self.count
+
+    def aggregateSummary(self, includeHeader=True):
+        """Build a line-per-aggregate summary; '+' prefixes in the spec set
+        indentation level and '!' marks entries excluded from overriding."""
+        aggs = '!max|!+max_allowed|total|+handled|++success|+++completed|+++queued|++delayed'
+        # NOTE(review): counts receives attribute assignments below, which a
+        # plain list (the includeHeader=False branch) would reject -- confirm
+        # _get_summary_/_summarize_lines_ supply an attribute-capable list type.
+        counts = self._get_summary_(header_only=True) if includeHeader else []
+        parents, last_level = [], 1
+        for key_code in aggs.split('|'):
+            # NOTE(review): identity compare on a 1-char string; works under
+            # CPython interning but '!=' would be safer.
+            override_default = key_code[0] is not '!'
+            counts += [DictCaseInsensitive(marker='*' if override_default else ' ', child_values={}, children=[''])]
+            if not override_default:
+                key_code = key_code[1:]
+            key = key_code.lstrip('+')
+            counts.level, counts.value = len(key_code) - len(key) + 1, getattr(self, key)
+            counts.class_name = type(counts.value)
+            if counts.class_name is not int:
+                counts.value = counts.value.getDefault()
+            parent_lbl = '.'.join(parents)
+            counts.key, counts.label = DictKey(key, parent_lbl), DictKey(key, parent_lbl, 'label')
+            if counts.level < last_level:
+                del parents[-1]
+            elif counts.level > last_level:
+                parents.append(key)
+            last_level = counts.level
+        return self._summarize_lines_(counts, includeHeader)
+
+    def fullSummary(self, title='Evernote Counter'):
+        """Banner-wrapped repr plus the aggregate-only summary."""
+        return '\n'.join(
+            [self.make_banner(title + ": Summary"),
+             self.__repr__(),
+             ' ',
+             self.make_banner(title + ": Aggregates"),
+             self.aggregateSummary(False)])
diff --git a/anknotes/create_subnotes.py b/anknotes/create_subnotes.py
new file mode 100644
index 0000000..d80751d
--- /dev/null
+++ b/anknotes/create_subnotes.py
@@ -0,0 +1,177 @@
+# -*- coding: utf-8 -*-
+# Python Imports
+from bs4 import BeautifulSoup, NavigableString, Tag
+from copy import copy
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+# Anknotes Shared Imports
+from anknotes.shared import *
+from anknotes.imports import import_lxml
+from anknotes.constants import *
+from anknotes.base import matches_list, fmt, decode_html
+from anknotes.dicts import DictCaseInsensitive
+from anknotes.logging import show_tooltip
+
+# Anknotes Main Imports
+import anknotes.Controller
+# from anknotes.Controller import Controller
+
+# Anki Imports
+from aqt.qt import SIGNAL, QMenu, QAction
+from aqt import mw
+from aqt.utils import getText
+
+def create_subnotes(guids):  # Split each Evernote note's nested HTML lists into candidate subnotes; heavily nested closures share state via enclosing scopes
+ def create_subnote(guid):  # Process one note: load its content, walk its top-level lists
+ def process_lists(note, lst, levels=None, names=None):  # Recursive walker over a list of BeautifulSoup nodes; `levels`/`names` track position in the outline
+ def add_log_entry(title, content, filename=None, prefix_content=True, title_pad=16, **kw):  # Formatting helper; reads level/levels/names/lst_items from enclosing scopes
+ names_padded = u''.join(map(lambda x: (x+':').ljust(33) + ' ', names[1:-1])) + names[-1]
+ fmts = dict(levels_pad=u'\t' * level, levels=u'.'.join(map(str, levels)),
+ num_levels=len(levels), names=u': '.join(names[1:]).ljust(20),
+ names_padded=names_padded)
+ fmts['levels_str'] = (fmts['levels'] + ':').ljust(6)
+ if prefix_content:
+ fmts['content'] = content
+ content = u'{levels_pad}{levels_str} {content}'
+ if isinstance(lst_items, Tag) and lst_items.name in list_tag_names:
+ fmts['list_name'] = list_tag_names[lst_items.name]
+ content = fmt(content, 0, fmts)
+ if title:
+ title = (fmt(title, 0, fmts) + u': ').ljust(title_pad)
+ l.go(title + content, filename, **kw)
+
+ def process_tag():  # Handle the current `lst_items` Tag: classify it, log it, and recurse into its children
+ def get_log_fn():  # Log-file name derived from the current outline position
+ return u'.'.join(map(str, levels)) + u' - ' + u'-'.join(names[1:])
+ def log_tag():  # Route the current tag to the appropriate log stream
+ if not lst_items.contents:
+ add_log_entry('NO TOP TEXT', decode_html(lst_items.contents), crosspost='no_top_text')
+ if lst_items.name in list_tag_names:
+ add_log_entry('{list_name}', '{levels_pad}[{num_levels}] {levels}', prefix_content=False)
+ elif lst_items.name != 'li':
+ add_log_entry('OTHER TAG', decode(lst_items.contents[0]) if lst_items.contents else u'N/A')
+ elif not sublist.is_subnote:
+ add_log_entry('LIST ITEM', strip_tags(u''.join(sublist.list_items), True).strip())
+ else:
+ subnote_fn = u'..\\subnotes\\process_tag*\\' + get_log_fn()
+ subnote_shared = '*\\..\\..\\subnotes\\process_tag-all'
+ l.banner(u': '.join(names), subnote_fn)
+ if not create_subnote.logged_subnote:
+ l.blank(subnote_shared)
+ l.banner(title, subnote_shared, clear=False, append_newline=False)
+ l.banner(title, '..\\subnotes\\process_tag')
+ create_subnote.logged_subnote = True
+ add_log_entry('SUBNOTE', sublist.heading)
+ add_log_entry('', sublist.heading, '..\\subnotes\\process_tag', crosspost=subnote_fn)
+ add_log_entry('{levels}', '{names_padded}', subnote_shared, prefix_content=False, title_pad=13)
+ l.go(decode_html(sublist.subnote), subnote_fn)
+
+ def add_note(sublist, new_levels, new_names):  # Record a detected subnote into the shared myNotes accumulator
+ subnote_html = decode_html(sublist.subnote)
+ log_fn = u'..\\subnotes\\add_note*\\' + get_log_fn()
+ add_log_entry('SUBNOTE', '{levels_str} {names}: \n%s\n' % subnote_html, '..\\subnotes\\add_note', crosspost=log_fn, prefix_content=False)
+ myNotes.append([new_levels, new_names, subnote_html])
+
+ def process_list_item(contents):  # Scan an <li>'s children, accumulating leading text until a nested list marks a subnote
+ def check_subnote(li, sublist):
+ def check_heading_flags():  # Strip trailing flag chars (backtick/quote/colon) from the heading, remembering them
+ if not isinstance(sublist.heading_flags, list):
+ sublist.heading_flags = []
+ for str_ in "`':":
+ if sublist.heading.endswith(str_):
+ sublist.heading_flags.append(str_)
+ sublist.heading = sublist.heading[:-1*len(str_)]
+ check_heading_flags()
+ return
+
+ #Begin check_subnote()
+ if not (isinstance(li, Tag) and (li.name in list_tag_names) and li.contents and li.contents[0]):
+ sublist.list_items.append(decode_html(li))
+ return sublist
+ sublist.heading = strip_tags(decode_html(''.join(sublist.list_items)), True).strip()
+ sublist.base_title = u': '.join(names).replace(title + ': ', '')
+ sublist.is_reversible = not matches_list(sublist.heading, HEADINGS.NOT_REVERSIBLE)
+ check_heading_flags()
+ if "`" in sublist.heading_flags:
+ sublist.is_reversible = not sublist.is_reversible
+ sublist.use_descriptor = "'" in sublist.heading_flags or "`" in sublist.heading_flags
+ sublist.is_subnote = ':' in sublist.heading_flags or matches_list(sublist.heading, HEADINGS.TOP + '|' + HEADINGS.BOTTOM)
+ if not sublist.is_subnote:
+ return sublist
+ sublist.subnote = li
+ return sublist
+
+ # Begin process_list_item()
+ sublist = DictCaseInsensitive(is_subnote=False, list_items=[])
+ for li in contents:
+ sublist = check_subnote(li, sublist)
+ if sublist.is_subnote:
+ break
+ return sublist
+
+ # Begin process_tag()
+ new_levels = levels[:]
+ new_names = names[:]
+ if lst_items.name in list_tag_names:
+ new_levels.append(0)
+ new_names.append('CHILD ' + lst_items.name.upper())
+ elif lst_items.name == 'li':
+ levels[-1] = new_levels[-1] = levels[-1] + 1
+ sublist = process_list_item(lst_items.contents)
+ if sublist.is_subnote:
+ names[-1] = new_names[-1] = sublist.heading
+ add_note(sublist, new_levels, new_names)
+ else:
+ names[-1] = new_names[-1] = sublist.heading if sublist.heading else 'Xx' + strip_tags(unicode(''.join(sublist.list_items)), True).strip()
+ log_tag()
+ if lst_items.name in list_tag_names or lst_items.name == 'li':
+ process_lists(note, lst_items.contents, new_levels, new_names)
+
+ # Begin process_lists()
+ if levels is None or names is None:
+ levels = []
+ names = [title]
+ level = len(levels)
+ for lst_items in lst:
+ if isinstance(lst_items, Tag):
+ process_tag()
+ elif isinstance(lst_items, NavigableString):
+ add_log_entry('NAV STRING', decode_html(lst_items).strip(), crosspost=['nav_strings', '*\\..\\..\\nav_strings'])
+ else:
+ add_log_entry('LST ITEMS', lst_items.__class__.__name__, crosspost=['unexpected-type', '*\\..\\..\\unexpected-type'])
+
+ #Begin create_subnote()
+ content = db.scalar("guid = ?", guid, columns='content')
+ title = note_title = get_evernote_title_from_guid(guid)
+ l.path_suffix = '\\' + title
+ soup = BeautifulSoup(content)
+ en_note = soup.find('en-note')
+ note = DictCaseInsensitive(descriptor=None)
+ first_div = en_note.find('div')
+ if first_div:
+ descriptor_text = first_div.text
+ if descriptor_text.startswith('`'):
+ note.descriptor = descriptor_text[1:]  # leading backtick marks the first div as a descriptor
+ lists = en_note.find(['ol', 'ul'])
+ lists_all = soup.findAll(['ol', 'ul'])
+ l.banner(title, crosspost='strings')
+ create_subnote.logged_subnote = False
+ process_lists(note, [lists])
+ l.go(decode_html(lists), 'lists', clear=True)
+ l.go(soup.prettify(), 'full', clear=True)
+
+ #Begin create_subnotes()
+ list_tag_names = {'ul': 'UNORDERED LIST', 'ol': 'ORDERED LIST'}
+ db = ankDB()
+ myNotes = []
+ if import_lxml() is False:
+ return False
+ from anknotes.imports import lxml
+ l = Logger('Create Subnotes\\', default_filename='bs4', timestamp=False, rm_path=True)
+ l.base_path += 'notes\\'
+ for guid in guids:
+ create_subnote(guid)
+
diff --git a/anknotes/db.py b/anknotes/db.py
new file mode 100644
index 0000000..dbeab4b
--- /dev/null
+++ b/anknotes/db.py
@@ -0,0 +1,585 @@
+### Python Imports
+import time
+from datetime import datetime
+from copy import copy
+import os
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+
+### For PyCharm code completion
+# from anknotes import _sqlite3
+
+### Anki Shared Imports
+from anknotes.constants import *
+from anknotes.base import is_str, item_to_list, fmt, is_dict_type, is_seq_type, encode
+from anknotes.args import Args
+from anknotes.logging import log_sql, log, log_error, log_blank, pf
+from anknotes.dicts import DictCaseInsensitive
+from anknotes.imports import in_anki
+
+### Anki Imports
+if in_anki():
+ from aqt import mw
+ from anki.utils import ids2str, splitFields
+
+ankNotesDBInstance = None
+dbLocal = False
+
+lastHierarchyUpdate = datetime.now()
+
+
+def anki_profile_path_root():  # Folder containing all Anki profiles: one level above the addon's PATH
+ return os.path.abspath(os.path.join(os.path.dirname(PATH), '..' + os.path.sep))
+
+
def last_anki_profile_name():
    """Return the best-guess Anki profile name.

    Preference order: the configured ANKI.PROFILE_NAME, then the name recorded
    in FILES.USER.LAST_PROFILE_LOCATION, then the first profile directory found
    on disk (excluding 'addons'); "" when nothing matches.
    """
    root = anki_profile_path_root()
    name = ANKI.PROFILE_NAME
    if name and os.path.isdir(os.path.join(root, name)):
        return name
    if os.path.isfile(FILES.USER.LAST_PROFILE_LOCATION):
        # was: file(...).read() — leaked the file handle
        with open(FILES.USER.LAST_PROFILE_LOCATION, 'r') as f:
            name = f.read().strip()
        if name and os.path.isdir(os.path.join(root, name)):
            return name
    # bugfix: was `x is not 'addons'` — an identity comparison; strings from
    # os.listdir() are not interned, so 'addons' was never actually excluded
    dirs = [x for x in os.listdir(root)
            if os.path.isdir(os.path.join(root, x)) and x != 'addons']
    if not dirs:
        return ""
    return dirs[0]
+
+
+def ankDBSetLocal():  # Make subsequent ankDB() calls open collection.anki2 directly instead of Anki's live connection
+ global dbLocal
+ dbLocal = True
+
+
+def ankDBIsLocal():  # True once ankDBSetLocal() has been called
+ global dbLocal
+ return dbLocal
+
+
+def ankDB(table=None,reset=False):  # Singleton accessor for the shared ank_DB instance; reset=True forces reconnection
+ global ankNotesDBInstance, dbLocal
+ if not ankNotesDBInstance or reset:
+ path = None
+ if dbLocal:
+ path = os.path.abspath(os.path.join(anki_profile_path_root(), last_anki_profile_name(), 'collection.anki2'))
+ ankNotesDBInstance = ank_DB(path)
+ if table:
+ db_copy = ank_DB(init_db=False, table=table)  # lightweight view sharing the same connection with a different default table
+ db_copy._db = ankNotesDBInstance._db
+ db_copy._path = ankNotesDBInstance._path
+ return db_copy
+ return ankNotesDBInstance
+
+
def escape_text_sql(title):
    """Escape single quotes for embedding text in a SQL string literal."""
    return "''".join(title.split("'"))
+
+
def delete_anki_notes_and_cards_by_guid(evernote_guids):
    """Delete the Anki cards, then notes, whose fields begin with each Evernote guid."""
    rows = [[FIELDS.EVERNOTE_GUID_PREFIX + guid] for guid in evernote_guids]
    db = ankDB()
    db.executemany("DELETE FROM cards WHERE nid in (SELECT id FROM notes WHERE flds LIKE ? || '%')", rows)
    db.executemany("DELETE FROM notes WHERE flds LIKE ? || '%'", rows)
+
+
def get_evernote_title_from_guid(guid):
    """Return the title of the anknotes row with the given Evernote guid."""
    # Parameterized (was string-interpolated) for consistency with the other
    # queries in this module and to avoid quoting issues.
    return ankDB().scalar("SELECT title FROM {n} WHERE guid = ?", guid)
+
+
+def get_evernote_titles_from_nids(nids):  # Same lookup as get_evernote_titles, keyed by Anki note id
+ return get_evernote_titles(nids, 'nid')
+
+
def get_evernote_titles(guids, column='guid'):
    """Return titles of rows whose `column` value is in `guids`, sorted by title."""
    quoted = ', '.join("'%s'" % value for value in guids)
    sql = "SELECT title FROM {n} WHERE %s IN (%s) ORDER BY title ASC" % (column, quoted)
    return ankDB().list(sql)
+
+
+def get_anki_deck_id_from_note_id(nid):  # Deck id of the first card belonging to the Anki note (Python 2 long)
+ return long(ankDB().scalar("SELECT did FROM cards WHERE nid = ? LIMIT 1", nid))
+
+
def get_anki_fields_from_evernote_guids(guids):
    """Fetch Anki `flds` per guid; scalar in -> scalar out, list in -> list out."""
    single = not isinstance(guids, list)
    if single:
        guids = [guids]
    db = ankDB()
    results = []
    for guid in guids:
        results.append(db.scalar("SELECT flds FROM notes WHERE flds LIKE '{guid_prefix}' || ? || '%'", guid))
    if single:
        return results[0] if results else None
    return results
+
+def get_anki_card_ids_from_evernote_guids(guids, sql=None):  # Card ids (or custom sql rows) matched by guid prefix; chunked via execute_sqlite_query
+ pred = "n.flds LIKE '%s' || ? || '%%'" % FIELDS.EVERNOTE_GUID_PREFIX
+ if sql is None:
+ sql = "SELECT c.id FROM cards c, notes n WHERE c.nid = n.id AND ({pred})"
+ return execute_sqlite_query(sql, guids, pred=pred)
+
+
+def get_anki_note_id_from_evernote_guid(guid):  # Single note id for one guid
+ return ankDB().scalar("SELECT n.id FROM notes n WHERE n.flds LIKE '%s' || ? || '%%'" % FIELDS.EVERNOTE_GUID_PREFIX,
+ guid)
+
+
+def get_anki_note_ids_from_evernote_guids(guids):  # Note ids for many guids
+ return get_anki_card_ids_from_evernote_guids(guids, "SELECT n.id FROM notes n WHERE {pred}")
+
+
+def get_paired_anki_note_ids_from_evernote_guids(guids):  # (note id, flds) pairs; each guid bound twice for the two-column select
+ return get_anki_card_ids_from_evernote_guids([[x, x] for x in guids],
+ "SELECT n.id, n.flds FROM notes n WHERE {pred}")
+
+
+def get_anknotes_root_notes_nids():  # Cached nids of root (top-of-hierarchy) notes
+ return get_cached_data(get_anknotes_root_notes_nids, lambda: get_anknotes_root_notes_guids('nid'))
+
+
def get_cached_data(func, data_generator, subkey=''):
    """Memoize data_generator() as attributes on `func`, refreshed whenever
    lastHierarchyUpdate moves past the stored timestamp. Caching is bypassed
    entirely when ANKNOTES.CACHE_SEARCHES is off."""
    if not ANKNOTES.CACHE_SEARCHES:
        return data_generator()
    if subkey:
        subkey += '_'
    data_attr = subkey + 'data'
    stamp_attr = subkey + 'update'
    stale = not hasattr(func, data_attr) or getattr(func, stamp_attr) < lastHierarchyUpdate
    if stale:
        setattr(func, data_attr, data_generator())
        setattr(func, stamp_attr, datetime.now())
    return getattr(func, data_attr)
+
+
+def get_anknotes_root_notes_guids(column='guid', tag=None):  # Cached guids (or `column`) of notes whose title matches a potential root title
+ sql = "SELECT %s FROM {n} WHERE UPPER(title) IN {pred}" % column
+ data_key = column
+ if tag:
+ sql += " AND tagNames LIKE '%%,%s,%%'" % tag; data_key += '-' + tag
+
+ def cmd():
+ titles = get_anknotes_potential_root_titles(upper_case=False, encode=False)
+ return execute_sqlite_in_query(sql, titles, pred='UPPER(?)')
+ return get_cached_data(get_anknotes_root_notes_guids, cmd, data_key)
+
+
+def get_anknotes_root_notes_titles():  # Cached titles corresponding to the root-note guids
+ return get_cached_data(get_anknotes_root_notes_titles,
+ lambda: get_evernote_titles(get_anknotes_root_notes_guids()))
+
+
+def get_anknotes_potential_root_titles(upper_case=False, encode=False, **kwargs):  # Distinct title prefixes before ':' mapped through generateTOCTitle; flags stack extra transforms
+ global generateTOCTitle
+ from anknotes.EvernoteNoteTitle import generateTOCTitle
+ def mapper(x): return generateTOCTitle(x)
+ if upper_case:
+ mapper = lambda x, f=mapper: f(x).upper()
+ if encode:
+ mapper = lambda x, f=mapper: encode(f(x))
+ data = get_cached_data(get_anknotes_potential_root_titles, lambda: ankDB().list(
+ "SELECT DISTINCT SUBSTR(title, 0, INSTR(title, ':')) FROM {n} WHERE title LIKE '%:%'"))
+ return map(mapper, data)
+
+
+# def __get_anknotes_root_notes_titles_query():
+# return '(%s)' % ' OR '.join(["title LIKE '%s'" % (escape_text_sql(x) + ':%') for x in get_anknotes_root_notes_titles()])
+
+def __get_anknotes_root_notes_pred(base=None, column='guid', **kwargs):  # Rows of `column` whose title starts with any root title + ':'
+ if base is None:
+ base = "SELECT %(column)s FROM %(table)s WHERE {pred} "
+ base = base % {'column': column, 'table': TABLES.EVERNOTE.NOTES}
+ pred = "title LIKE ? || ':%'"
+ return execute_sqlite_query(base, get_anknotes_root_notes_titles(), pred=pred)
+
+
def execute_sqlite_in_query(sql, data, in_query=True, **kwargs):
    """Run `sql` with IN-style (comma-delimited) predicate chunks; see execute_sqlite_query."""
    # bugfix: forward the caller's in_query value instead of hard-coding True
    return execute_sqlite_query(sql, data, in_query=in_query, **kwargs)
+
+
def execute_sqlite_query(sql, data, in_query=False, **kwargs):
    """Run `sql` once per generated predicate chunk (SQLite parameter limit)
    and concatenate the list() results.

    :param sql: query template containing a {pred} placeholder.
    :param data: bound values, split into <=990-item chunks.
    :param in_query: build comma-delimited IN predicates instead of OR chains.
    """
    if in_query:
        queries = generate_sqlite_in_predicate(data, **kwargs)
    else:
        queries = generate_sqlite_predicate(data, **kwargs)
    results = []
    db = ankDB()
    for query, chunk in queries:
        # bugfix: format into a fresh string each round — the old code
        # overwrote `sql`, so the {pred} placeholder was gone for the second
        # and later chunks; also stop shadowing the `data` parameter
        chunk_sql = fmt(sql, pred=query)
        result = db.list(chunk_sql, *chunk)
        log_sql('FROM execute_sqlite_query ' + chunk_sql,
                ['Data [%d]: ' % len(chunk), chunk, result[:3]])
        results += result
    return results
+
+
def generate_sqlite_predicate(data, pred='?', pred_delim=' OR ', query_base='(%s)', max_round=990):
    """Split `data` into chunks of at most `max_round` items and build one
    placeholder predicate per chunk.

    :return: list of [predicate_string, chunk] pairs, e.g. ['(? OR ?)', [a, b]].
    """
    if not query_base:
        query_base = '%s'
    length = len(data)
    # bugfix: ceiling division — the old float arithmetic produced 0 rounds
    # whenever `length` was an exact non-zero multiple of max_round, silently
    # dropping every query
    rounds = (length + max_round - 1) // max_round
    queries = []
    for i in range(rounds):
        start = max_round * i
        end = min(length, start + max_round)
        predicate = query_base % (pred + (pred_delim + pred) * (end - start - 1))
        queries.append([predicate, data[start:end]])
    return queries
+
+
+def generate_sqlite_in_predicate(data, pred='?', pred_delim=', ', query_base='(%s)'):  # Comma-delimited variant for SQL IN (...) clauses
+ return generate_sqlite_predicate(data, pred=pred, query_base=query_base, pred_delim=pred_delim)
+
+
+def get_sql_anki_cids_from_evernote_guids(guids):  # SQL fragment selecting cards whose notes match the guids
+ return "c.nid IN " + ids2str(get_anki_note_ids_from_evernote_guids(guids))
+
+
+def get_anknotes_child_notes_nids(**kwargs):  # nid variant; drops any caller-supplied 'column' to avoid a duplicate kwarg
+ if 'column' in kwargs:
+ del kwargs['column']
+ return get_anknotes_child_notes(column='nid', **kwargs)
+
+
+def get_anknotes_child_notes(column='guid', **kwargs):  # Cached notes whose title extends a root title ("Root: Child")
+ return get_cached_data(get_anknotes_child_notes, lambda: __get_anknotes_root_notes_pred(column=column, **kwargs),
+ column)
+
+
+def get_anknotes_orphan_notes_nids(**kwargs):  # nid variant of get_anknotes_orphan_notes
+ if 'column' in kwargs:
+ del kwargs['column']
+ return get_anknotes_orphan_notes(column='nid', **kwargs)
+
+
+def get_anknotes_orphan_notes(column='guid', **kwargs):  # Cached hierarchical-looking titles with no matching root note
+ return get_cached_data(get_anknotes_orphan_notes, lambda: __get_anknotes_root_notes_pred(
+ "SELECT %(column)s FROM %(table)s WHERE title LIKE '%%:%%' AND NOT {pred}", column=column, **kwargs), column)
+
+
def get_evernote_guid_from_anki_fields(fields):
    """Extract the Evernote guid from Anki note fields.

    :param fields: dict of field-name -> value, a packed `flds` string, or an
        already-split field list.
    :return: guid with the prefix stripped, or None for a dict without the
        guid field.
    """
    if isinstance(fields, dict):
        # idiom fix: was `not FIELDS.EVERNOTE_GUID in fields`
        if FIELDS.EVERNOTE_GUID not in fields:
            return None
        return fields[FIELDS.EVERNOTE_GUID].replace(FIELDS.EVERNOTE_GUID_PREFIX, '')
    if is_str(fields):
        fields = splitFields(fields)
    return fields[FIELDS.ORD.EVERNOTE_GUID].replace(FIELDS.EVERNOTE_GUID_PREFIX, '')
+
+
+def get_all_local_db_guids(filter=None):  # All guids matching the optional SQL predicate; NOTE(review): `filter` shadows the builtin but is part of the public signature
+ if filter is None:
+ filter = "1"
+ return ankDB().list("SELECT guid FROM {n} WHERE %s ORDER BY title ASC" % filter)
+
+
+def get_evernote_model_ids(sql=False):  # Lazily-built map of Evernote model names -> Anki model ids, memoized on the function object
+ if not hasattr(get_evernote_model_ids, 'model_ids'):
+ from anknotes.Anki import Anki
+ anki = Anki()
+ anki.add_evernote_models(allowForceRebuild=False)
+ get_evernote_model_ids.model_ids = anki.evernoteModels
+ del anki
+ del Anki
+ if sql:
+ return 'n.mid IN (%s)' % ', '.join(get_evernote_model_ids.model_ids.values())  # SQL fragment form
+ return get_evernote_model_ids.model_ids
+
+
def update_anknotes_nids():
    """Backfill anknotes rows whose nid is unset from the Anki notes table.

    Returns the number of rows that needed backfilling (0 when none).
    """
    db = ankDB()
    missing = db.count('nid <= 0')
    if not missing:
        return missing
    rows = db.all("SELECT n.id, n.flds FROM notes n WHERE " + get_evernote_model_ids(True))
    pairs = [[nid, get_evernote_guid_from_anki_fields(flds)] for nid, flds in rows]
    db.executemany('UPDATE {n} SET nid = ? WHERE guid = ?', pairs)
    db.commit()
    return missing
+
+
+class ank_DB(object):  # Thin wrapper over sqlite3 adding query templating ({t}/{n}/{pred}/...), logging, and table-init helpers
+ echo = False
+
+ def __init__(self, path=None, text=None, timeout=0, init_db=True, table=None):  # path=None -> piggyback on Anki's open collection; init_db=False -> shell instance for ankDB(table)
+ self._table_ = table
+ self.ankdb_lastquery = None
+ self.echo = False
+ if not init_db:
+ return
+ encpath = path
+ if isinstance(encpath, unicode):
+ encpath = encode(path)
+ if path:
+ log('Creating local ankDB instance from path: ' + path, 'sql\\ankDB')
+ self._db = sqlite.connect(encpath, timeout=timeout)
+ self._db.row_factory = sqlite.Row
+ if text:
+ self._db.text_factory = text
+ self._path = path
+ else:
+ log('Creating local ankDB instance from Anki DB instance at: ' + mw.col.db._path, 'sql\\ankDB')
+ self._db = mw.col.db._db
+ """
+ :type : sqlite.Connection
+ """
+ self._db.row_factory = sqlite.Row
+ self._path = mw.col.db._path
+ # self._db = self._get_db_(**kw)
+
+ @property
+ def table(self):  # Default table substituted for {t}; falls back to the anknotes notes table
+ return self._table_ if self._table_ else TABLES.EVERNOTE.NOTES
+
+ def setrowfactory(self):
+ self._db.row_factory = sqlite.Row
+
+ def drop(self, table):
+ self.execute("DROP TABLE IF EXISTS " + table)
+
+ @staticmethod
+ def _is_stmt_(sql, stmts=None):  # True when sql starts with a mutating statement (or one of the extra `stmts`)
+ s = sql.strip().lower()
+ stmts = ["insert", "update", "delete", "drop", "create", "replace"] + item_to_list(stmts)
+ for stmt in stmts:
+ if s.startswith(stmt):
+ return True
+ return False
+
+ def update(self, sql=None, *a, **ka):  # UPDATE helper; pass columns as `sql` with where=..., or a full SET/WHERE fragment
+ if 'where' in ka:
+ ka['columns'] = sql
+ sql = None
+ if sql is None:
+ sql = '{columns} WHERE {where}'
+ sql = "UPDATE {t} SET " + sql
+ self.execute(sql, a, ka)
+
+ def delete(self, sql, *a, **ka):  # DELETE helper; `sql` is the WHERE clause
+ sql = "DELETE FROM {t} WHERE " + sql
+ self.execute(sql, a, ka)
+
+ def insert(self, auto, replace_into=False, **kw):  # INSERT (or INSERT OR REPLACE) built from the `auto` dict's keys
+ keys = auto.keys()
+ values = [":%s" % key for key in keys]
+ keys = ["'%s'" % key for key in keys]
+ sql = 'INSERT%s INTO {t}(%s) VALUES(%s)' % (' OR REPLACE' if replace_into else '',
+ ', '.join(keys), ', '.join(values))
+ self.execute(sql, auto=auto, kw=kw)
+
+ def insert_or_replace(self, *a, **kw):
+ kw['replace_into'] = True
+ self.insert(*a, **kw)
+
+ def execute(self, sql, a=None, kw=None, auto=None, **kwargs):  # Core dispatcher: normalizes args, templates sql, logs, then runs it
+ if is_dict_type(a):
+ kw, a = a, kw
+ if not is_seq_type(a):
+ a = item_to_list(a)
+ if is_dict_type(sql):
+ auto = sql
+ sql = ' AND '.join(["`{0}` = :{0}".format(key) for key in auto.keys()])
+ if kw is None:
+ kw = {}
+ kwargs.update(kw)
+ sql = self._create_query_(sql, **kwargs)
+ if auto:
+ kw = auto
+ log_sql(sql, a, kw, self=self)
+ self.ankdb_lastquery = sql
+ if self._is_stmt_(sql):
+ self.mod = True
+ t = time.time()
+ try:
+ if a:
+ # execute("...where id = ?", 5)
+ res = self._db.execute(sql, a)
+ elif kw:
+ # execute("...where id = :id", id=5)
+ res = self._db.execute(sql, kw)
+ else:
+ res = self._db.execute(sql)
+ except (sqlite.OperationalError, sqlite.ProgrammingError, sqlite.Error, Exception) as e:
+ log_sql(sql, a, kw, self=self, filter_disabled=False)
+ import traceback
+ log_error('Error with ankDB().execute(): %s\n Query: %s\n Trace: %s' %
+ (str(e), sql, traceback.format_exc()))
+ raise
+ if self.echo:
+ # print a, ka
+ print sql, "%0.3fms" % ((time.time() - t) * 1000)
+ if self.echo == "2":
+ print a, kw
+ return res
+
+ def _fmt_query_(self, sql, **kw):  # Substitute the table/column aliases ({t}, {n}, {s}, {guid_prefix}, ...) into sql
+ formats = dict(table=self.table, where='1', columns='*')
+ override = dict(n=TABLES.EVERNOTE.NOTES, s=TABLES.SEE_ALSO, a=TABLES.TOC_AUTO,
+ nv=TABLES.NOTE_VALIDATION_QUEUE, nb=TABLES.EVERNOTE.NOTEBOOKS, tt=TABLES.EVERNOTE.TAGS,
+ t_toc='%%,%s,%%' % TAGS.TOC, t_tauto='%%,%s,%%' % TAGS.TOC_AUTO,
+ t_out='%%,%s,%%' % TAGS.OUTLINE, anki_guid='{guid_prefix}{guid}%',
+ guid_prefix=FIELDS.EVERNOTE_GUID_PREFIX)
+ keys = formats.keys()
+ formats.update(kw)
+ formats['t'] = formats['table']
+ formats.update(override)
+ sql = fmt(sql, formats)
+ for key in keys:
+ if key in kw:
+ del kw[key]
+ return sql
+
+ def _create_query_(self, sql, **kw):  # Promote a bare WHERE fragment to a full SELECT, then apply templating and optional ORDER BY
+ if not self._is_stmt_(sql, 'select'):
+ sql = 'SELECT {columns} FROM {t} WHERE ' + sql
+ sql = self._fmt_query_(sql, **kw)
+ if 'order' in kw and 'order by' not in sql.lower():
+ sql += ' ORDER BY ' + kw['order']
+ del kw['order']
+ return sql
+
+ def executemany(self, sql, data, **kw):  # Bulk variant of execute(); same templating and error logging
+ sql = self._create_query_(sql, **kw)
+ log_sql(sql, data, self=self)
+ self.mod = True
+ t = time.time()
+ try:
+ self._db.executemany(sql, data)
+ except (sqlite.OperationalError, sqlite.ProgrammingError, sqlite.Error, Exception) as e:
+ log_sql(sql, data, self=self, filter_disabled=False)
+ import traceback
+ log_error('Error with ankDB().executemany(): %s\n Query: %s\n Trace: %s' % (str(e), sql, traceback.format_exc()))
+ raise
+ if self.echo:
+ print sql, "%0.3fms" % ((time.time() - t) * 1000)
+ if self.echo == "2":
+ print data
+
+ def commit(self):
+ t = time.time()
+ self._db.commit()
+ if self.echo:
+ print "commit %0.3fms" % ((time.time() - t) * 1000)
+
+ def executescript(self, sql):
+ self.mod = True
+ if self.echo:
+ print sql
+ self._db.executescript(sql)
+
+ def rollback(self):
+ self._db.rollback()
+
+ def exists(self, *a, **kw):  # True when count(*) over the predicate is positive
+ count = self.count(*a, **kw)
+ return count is not None and count > 0
+
+ def count(self, *a, **kw):
+ return self.scalar('SELECT COUNT(*) FROM {t} WHERE {where}', *a, **kw)
+
+ def scalar(self, sql='1', *a, **kw):  # First column of the first row, or None; verbose tracing to the ankdb_scalar log
+ log_text = 'Call to DB.ankdb_scalar():'
+ if not isinstance(self, ank_DB):
+ log_text += '\n - Self: ' + pf(self)
+ if a:
+ log_text += '\n - Args: ' + pf(a)
+ if kw:
+ log_text += '\n - KWArgs: ' + pf(kw)
+ last_query=''
+ if hasattr(self, 'ankdb_lastquery'):
+ last_query = self.ankdb_lastquery
+ if is_str(last_query):
+ last_query = last_query[:50]
+ else:
+ last_query = pf(last_query)
+ log_text += '\n - Last Query: ' + last_query
+ log(log_text + '\n', 'sql\\ankdb_scalar')
+ try:
+ res = self.execute(sql, a, kw)
+ except TypeError as e:
+ log(" > ERROR with ankdb_scalar while executing query: %s\n > LAST QUERY: %s" % (str(e), last_query), 'sql\\ankdb_scalar', crosspost='sql\\ankdb_scalar-error')
+ raise
+ if not isinstance(res, sqlite.Cursor):
+ log(' > Cursor: %s' % pf(res), 'sql\\ankdb_scalar')
+ try:
+ res = res.fetchone()
+ except TypeError as e:
+ log(" > ERROR with ankdb_scalar while fetching result: %s\n > LAST QUERY: %s" % (str(e), last_query), 'sql\\ankdb_scalar', crosspost='sql\\ankdb_scalar-error')
+ raise
+ log_blank('sql\\ankdb_scalar')
+ if res:
+ return res[0]
+ return None
+
+ def all(self, sql='1', *a, **kw):  # All rows
+ return self.execute(sql, a, kw).fetchall()
+
+ def first(self, sql='1', *a, **kw):  # First row only; closes the cursor
+ c = self.execute(sql, a, kw)
+ res = c.fetchone()
+ c.close()
+ return res
+
+ def list(self, sql='1', *a, **kw):  # First column of every row
+ return [x[0] for x in self.execute(sql, a, kw)]
+
+ def close(self):
+ self._db.close()
+
+ def set_progress_handler(self, *args):
+ self._db.set_progress_handler(*args)
+
+ def __enter__(self):
+ self._db.execute("begin")
+ return self
+
+ def __exit__(self, exc_type, *args):
+ self._db.close()
+
+ def totalChanges(self):
+ return self._db.total_changes
+
+ def interrupt(self):
+ self._db.interrupt()
+
+ def recreate(self, force=True, t='{t}'):  # Recreate the default (or given) table via Init()
+ self.Init(t, force)
+
+ def InitTags(self, force=False):  # Create (or force-recreate) the Evernote tags table
+ if_exists = " IF NOT EXISTS" if not force else ""
+ log("Rebuilding %stags table" % ('*' if force else ''), 'sql\\ankDB')
+ self.execute(
+ """CREATE TABLE%s `%s` ( `guid` TEXT NOT NULL UNIQUE, `name` TEXT NOT NULL, `parentGuid` TEXT, `updateSequenceNum` INTEGER NOT NULL, PRIMARY KEY(guid) );""" % (
+ if_exists, TABLES.EVERNOTE.TAGS))
+
+ def InitNotebooks(self, force=False):  # Create (or force-recreate) the notebooks table
+ if_exists = " IF NOT EXISTS" if not force else ""
+ self.execute(
+ """CREATE TABLE%s `%s` ( `guid` TEXT NOT NULL UNIQUE, `name` TEXT NOT NULL, `updateSequenceNum` INTEGER NOT NULL, `serviceUpdated` INTEGER NOT NULL, `stack` TEXT, PRIMARY KEY(guid) );""" % (
+ if_exists, TABLES.EVERNOTE.NOTEBOOKS))
+
+ def InitSeeAlso(self, forceRebuild=False):  # Create the see-also table; forceRebuild drops it first
+ if_exists = " IF NOT EXISTS"
+ if forceRebuild:
+ self.drop(TABLES.SEE_ALSO)
+ self.commit()
+ if_exists = ""
+ self.execute(
+ """CREATE TABLE%s `%s` ( `id` INTEGER, `source_evernote_guid` TEXT NOT NULL, `number` INTEGER NOT NULL DEFAULT 100, `uid` INTEGER NOT NULL DEFAULT -1, `shard` TEXT NOT NULL DEFAULT -1, `target_evernote_guid` TEXT NOT NULL, `html` TEXT NOT NULL, `title` TEXT NOT NULL, `from_toc` INTEGER DEFAULT 0, `is_toc` INTEGER DEFAULT 0, `is_outline` INTEGER DEFAULT 0, PRIMARY KEY(id), unique(source_evernote_guid, target_evernote_guid) );""" % (
+ if_exists, TABLES.SEE_ALSO))
+
+ def Init(self, table='*', force=False):  # Create any (or '*' = all) anknotes tables that do not yet exist
+ table = self._fmt_query_(table)
+ log("Rebuilding tables: %s" % table, 'sql\\ankDB')
+ if table == '*' or table == TABLES.EVERNOTE.NOTES:
+ self.execute(
+ """CREATE TABLE IF NOT EXISTS `{n}` ( `guid` TEXT NOT NULL UNIQUE, `nid` INTEGER NOT NULL DEFAULT -1, `title` TEXT NOT NULL, `content` TEXT NOT NULL, `updated` INTEGER NOT NULL, `created` INTEGER NOT NULL, `updateSequenceNum` INTEGER NOT NULL, `notebookGuid` TEXT NOT NULL, `tagGuids` TEXT NOT NULL, `tagNames` TEXT NOT NULL, PRIMARY KEY(guid) );""")
+ if table == '*' or table == TABLES.EVERNOTE.NOTES_HISTORY:
+ self.execute(
+ """CREATE TABLE IF NOT EXISTS `%s` ( `guid` TEXT NOT NULL, `title` TEXT NOT NULL, `content` TEXT NOT NULL, `updated` INTEGER NOT NULL, `created` INTEGER NOT NULL, `updateSequenceNum` INTEGER NOT NULL, `notebookGuid` TEXT NOT NULL, `tagGuids` TEXT NOT NULL, `tagNames` TEXT NOT NULL)""" % TABLES.EVERNOTE.NOTES_HISTORY)
+ if table == '*' or table == TABLES.TOC_AUTO:
+ self.execute(
+ """CREATE TABLE IF NOT EXISTS `%s` ( `root_title` TEXT NOT NULL UNIQUE, `contents` TEXT NOT NULL, `tagNames` TEXT NOT NULL, `notebookGuid` TEXT NOT NULL, PRIMARY KEY(root_title) );""" % TABLES.TOC_AUTO)
+ if table == '*' or table == TABLES.NOTE_VALIDATION_QUEUE:
+ self.execute(
+ """CREATE TABLE IF NOT EXISTS `%s` ( `guid` TEXT, `title` TEXT NOT NULL, `contents` TEXT NOT NULL, `tagNames` TEXT NOT NULL DEFAULT ',,', `notebookGuid` TEXT, `validation_status` INTEGER NOT NULL DEFAULT 0, `validation_result` TEXT, `noteType` TEXT);""" % TABLES.NOTE_VALIDATION_QUEUE)
+ if table == '*' or table == TABLES.SEE_ALSO:
+ self.InitSeeAlso(force)
+ if table == '*' or table == TABLES.EVERNOTE.TAGS:
+ self.InitTags(force)
+ if table == '*' or table == TABLES.EVERNOTE.NOTEBOOKS:
+ self.InitNotebooks(force)
diff --git a/anknotes/detect_see_also_changes.py b/anknotes/detect_see_also_changes.py
new file mode 100644
index 0000000..907e15d
--- /dev/null
+++ b/anknotes/detect_see_also_changes.py
@@ -0,0 +1,308 @@
+# -*- coding: utf-8 -*-
+import shutil
+import sys
+
+try:
+ from pysqlite2 import dbapi2 as sqlite
+except ImportError:
+ from sqlite3 import dbapi2 as sqlite
+
+from anknotes.shared import *
+from anknotes import stopwatch
+
+from anknotes.EvernoteNotePrototype import EvernoteNotePrototype
+from anknotes.AnkiNotePrototype import AnkiNotePrototype
+from enum import Enum
+from anknotes.enums import *
+from anknotes.structs import EvernoteAPIStatus
+
+Error = sqlite.Error
+ankDBSetLocal()
+from anknotes.ankEvernote import Evernote
+from anknotes.Anki import Anki
+
+
+class notes:  # Namespace for see-also diff state: old/new note versions plus the shared regex
+ class version(object):  # One version of a note's content/see-also text with lazily derived forms
+ class pstrings:  # Holds original/processed/updated variants of a string plus cached regex wrappers
+ __updated = None
+ __processed = None
+ __original = None
+ __regex_updated = None
+ """: type : notes.version.see_also_match """
+ __regex_processed = None
+ """: type : notes.version.see_also_match """
+ __regex_original = None
+ """: type : notes.version.see_also_match """
+
+ @property
+ def regex_original(self):  # Lazily built see_also_match over the original text
+ if self.original is None:
+ return None
+ if self.__regex_original is None:
+ self.__regex_original = notes.version.see_also_match(self.original)
+ return self.__regex_original
+
+ @property
+ def regex_processed(self):  # Lazily built see_also_match over the processed text
+ if self.processed is None:
+ return None
+ if self.__regex_processed is None:
+ self.__regex_processed = notes.version.see_also_match(self.processed)
+ return self.__regex_processed
+
+ @property
+ def regex_updated(self):  # Lazily built see_also_match over the updated text
+ if self.updated is None:
+ return None
+ if self.__regex_updated is None:
+ self.__regex_updated = notes.version.see_also_match(self.updated)
+ return self.__regex_updated
+
+ @property
+ def processed(self):  # Original text run through str_process, cached
+ if self.__processed is None:
+ self.__processed = str_process(self.original)
+ return self.__processed
+
+ @property
+ def updated(self):  # Explicitly set updated text; falls back to str_process(original) without caching
+ if self.__updated is None:
+ return str_process(self.__original)
+ return self.__updated
+
+ @updated.setter
+ def updated(self, value):
+ self.__regex_updated = None
+ self.__updated = value
+
+ @property
+ def final(self):  # Fully post-processed form of the updated text
+ return str_process_full(self.updated)
+
+ @property
+ def original(self):
+ return self.__original
+
+ def useProcessed(self):  # Promote the processed text to be the updated text
+ self.updated = self.processed
+
+ def __init__(self, original=None):
+ self.__original = original
+
+ class see_also_match(object):  # Caches the result of running notes.rgx over a subject string
+ __subject = None
+ __content = None
+ __matchobject = None
+ """:type : anknotes._re.__Match """
+ __match_attempted = 0
+
+ @property
+ def subject(self):  # Text searched; defaults to content when no explicit subject was set
+ if not self.__subject:
+ return self.content
+ return self.__subject
+
+ @subject.setter
+ def subject(self, value):
+ self.__subject = value
+ self.__match_attempted = 0
+ self.__matchobject = None
+
+ @property
+ def content(self):
+ return self.__content
+
+ def groups(self, group=0):
+ """
+ :param group:
+ :type group : int | str | unicode
+ :return:
+ """
+ if not self.successful_match:
+ return None
+ return self.__matchobject.group(group)
+
+ @property
+ def successful_match(self):  # Runs the regex at most once per subject; NOTE(review): uses `is 0` on the attempt counter
+ if self.__matchobject:
+ return True
+ if self.__match_attempted is 0 and self.subject is not None:
+ self.__matchobject = notes.rgx.search(self.subject)
+ """:type : anknotes._re.__Match """
+ self.__match_attempted += 1
+ return self.__matchobject is not None
+
+ @property
+ def main(self):
+ return self.groups(0)
+
+ @property
+ def see_also(self):
+ return self.groups('SeeAlso')
+
+ @property
+ def see_also_content(self):
+ return self.groups('SeeAlsoContent')
+
+ def __init__(self, content=None):
+ """
+
+ :type content: str | unicode
+ """
+ self.__content = content
+ self.__match_attempted = 0
+ self.__matchobject = None
+ """:type : anknotes._re.__Match """
+
+ content = pstrings()
+ see_also = pstrings()
+
+ old = version()
+ new = version()
+ rgx = regex_see_also()
+ match_type = 'NA'
+
+
+def str_process(str_):
+ if not str_:
+ return str_
+ str_ = str_.replace(u"evernote:///", u"evernote://")
+ str_ = re.sub(r'https://www.evernote.com/shard/(s\d+)/[\w\d]+/(\d+)/([\w\d\-]+)',
+ r'evernote://view/\2/\1/\3/\3/', str_)
+ str_ = str_.replace(u"evernote://", u"evernote:///").replace(u'
', u'
')
+ str_ = re.sub(r'
', u'
', str_, 0, re.IGNORECASE)
+ str_ = re.sub(r'(?s)<<(?P(?: )?)