From cec0a4572ad851dcf618afaf18284440164ee628 Mon Sep 17 00:00:00 2001 From: Jeckelmann Manuel Date: Mon, 11 Sep 2017 11:24:08 +0200 Subject: [PATCH 1/4] Integrated a PEP8 check into the testing pipeline --- .travis.yml | 1 + tox.ini | 3 +++ 2 files changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index d0c14cb..c6f80da 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,6 +10,7 @@ env: - TOX_ENV=django110-sqlite - TOX_ENV=django111-pg - TOX_ENV=django111-sqlite + - TOX_ENV=pep8 # Enable PostgreSQL usage diff --git a/tox.ini b/tox.ini index a6ac18a..f3b9ba2 100644 --- a/tox.ini +++ b/tox.ini @@ -6,10 +6,12 @@ [tox] envlist = django{19,110,111}-{sqlite,pg} + pep8 [testenv] deps = coverage + pep8: pep8 django19: django>=1.9,<1.10 django110: django>=1.10,<1.11 django111: django>=1.11,<1.12 @@ -17,3 +19,4 @@ deps = commands = pg: coverage run --source=versions ./manage.py test --settings={env:TOX_PG_CONF:cleanerversion.settings.pg} sqlite: coverage run --source=versions ./manage.py test --settings=cleanerversion.settings.sqlite + pep8: pep8 From 5e4aea888d7fa1192488d6fe7946b703d2f68b9e Mon Sep 17 00:00:00 2001 From: Jeckelmann Manuel Date: Mon, 11 Sep 2017 11:24:36 +0200 Subject: [PATCH 2/4] PEP8 formatting; no functional changes --- cleanerversion/__init__.py | 1 + cleanerversion/settings/base.py | 5 +- docs/conf.py | 119 ++- manage.py | 3 +- setup.py | 12 +- versions/admin.py | 110 ++- versions/deletion.py | 84 +- versions/descriptors.py | 279 ++++--- versions/fields.py | 147 ++-- versions/models.py | 521 ++++++++----- versions/settings.py | 25 +- versions/util/helper.py | 16 +- versions/util/postgresql.py | 81 +- versions_tests/__init__.py | 2 +- versions_tests/admin.py | 6 +- versions_tests/apps.py | 6 +- versions_tests/models.py | 25 +- versions_tests/tests/test_models.py | 1122 +++++++++++++++++---------- versions_tests/tests/test_utils.py | 87 ++- 19 files changed, 1667 insertions(+), 984 deletions(-) diff --git a/cleanerversion/__init__.py b/cleanerversion/__init__.py index 02b88f8..5ddb446 100644 --- a/cleanerversion/__init__.py +++ b/cleanerversion/__init__.py @@ -1,5 +1,6 @@ VERSION = (2, 0, 0) + def get_version(positions=None): version = VERSION if positions and isinstance(positions, int): diff --git a/cleanerversion/settings/base.py b/cleanerversion/settings/base.py index 940b815..22864c8 100644 --- a/cleanerversion/settings/base.py +++ b/cleanerversion/settings/base.py @@ -28,8 +28,9 @@ 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { - 'context_processors': ['django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages'], + 'context_processors': [ + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages'], }, }, ] diff --git a/docs/conf.py b/docs/conf.py index e06db50..dfbf327 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,13 +12,13 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-#sys.path.insert(0, os.path.abspath('.')) +# sys.path.insert(0, os.path.abspath('.')) import cleanerversion sys.path.insert(len(sys.path), os.path.abspath('..')) @@ -26,7 +26,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -46,7 +46,7 @@ source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' @@ -71,9 +71,9 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -81,27 +81,27 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # -- Options for HTML output ---------------------------------------------- @@ -113,123 +113,122 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] +# html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. 
These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'CleanerVersionDoc' - # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'CleanerVersion.tex', u'CleanerVersion Documentation', - u'Jean-Christophe Zulian, Brian King, Andrea Marcacci, Manuel Jeckelmann', - 'manual'), + ('index', 'CleanerVersion.tex', u'CleanerVersion Documentation', + u'Jean-Christophe Zulian, Brian King, Andrea Marcacci, Manuel Jeckelmann', + 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. 
-#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- @@ -246,7 +245,7 @@ ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -255,26 +254,26 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', - 'CleanerVersion', - u'CleanerVersion Documentation', - u'Jean-Christophe Zulian, Brian King, Andrea Marcacci, Manuel Jeckelmann', - 'CleanerVersion', - 'One line description of project.', - 'Miscellaneous'), + ('index', + 'CleanerVersion', + u'CleanerVersion Documentation', + u'Jean-Christophe Zulian, Brian King, Andrea Marcacci, Manuel Jeckelmann', + 'CleanerVersion', + 'One line description of project.', + 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. diff --git a/manage.py b/manage.py index 07c991d..ffaf1ef 100755 --- a/manage.py +++ b/manage.py @@ -3,7 +3,8 @@ import sys if __name__ == "__main__": - os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'cleanerversion.settings.base') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", + 'cleanerversion.settings.base') from django.core.management import execute_from_command_line diff --git a/setup.py b/setup.py index 98c0ce1..b6410a2 100644 --- a/setup.py +++ b/setup.py @@ -4,25 +4,25 @@ import cleanerversion """ -Documentation can be found at https://docs.python.org/2/distutils/index.html, -but usually you only need to do the following steps to publish a new package +Documentation can be found at https://docs.python.org/2/distutils/index.html, +but usually you only need to do the following steps to publish a new package version to PyPI:: # Update the version tag in this file (setup.py) - python setup.py sdist --formats=gztar,zip + python setup.py sdist --formats=gztar,zip twine upload dist/* -That's already it. You should get the following output written to your +That's already it. You should get the following output written to your command line:: Server response (200): OK If you get errors, check the following things: -- Are you behind a proxy? --> Try not to be behind a proxy (I don't actually +- Are you behind a proxy? --> Try not to be behind a proxy (I don't actually know how to configure setup.py to be proxy-aware) - Is your command correct? --> Double-check using the reference documentation -- Do you have all the necessary libraries to generate the wanted formats? --> +- Do you have all the necessary libraries to generate the wanted formats? 
--> Reduce the set of formats or install libs """ diff --git a/versions/admin.py b/versions/admin.py index e33b85a..16b3b56 100644 --- a/versions/admin.py +++ b/versions/admin.py @@ -1,18 +1,20 @@ -from django.contrib.admin.widgets import AdminSplitDateTime -from django.contrib.admin.checks import ModelAdminChecks -from django.contrib import admin -from django.contrib.admin.utils import unquote +from datetime import datetime + from django import forms -from django.contrib.admin.templatetags.admin_static import static -from django.http import HttpResponseRedirect from django.conf.urls import url +from django.contrib import admin +from django.contrib.admin.checks import ModelAdminChecks +from django.contrib.admin.options import get_content_type_for_model +from django.contrib.admin.templatetags.admin_static import static +from django.contrib.admin.utils import unquote +from django.contrib.admin.widgets import AdminSplitDateTime from django.core.exceptions import PermissionDenied +from django.http import HttpResponseRedirect from django.shortcuts import get_object_or_404 -from django.contrib.admin.options import get_content_type_for_model +from django.template.response import TemplateResponse from django.utils.encoding import force_text from django.utils.text import capfirst -from django.template.response import TemplateResponse -from datetime import datetime + class DateTimeFilterForm(forms.Form): def __init__(self, request, *args, **kwargs): @@ -54,7 +56,8 @@ def __init__(self, field, request, params, model, model_admin, field_path): self.field_path = field_path self.lookup_kwarg_as_ofdate = '%s_as_of_0' % field_path self.lookup_kwarg_as_oftime = '%s_as_of_1' % field_path - super(DateTimeFilter, self).__init__(field, request, params, model, model_admin, field_path) + super(DateTimeFilter, self).__init__(field, request, params, model, + model_admin, field_path) self.form = self.get_form(request) def choices(self, cl): @@ -64,12 +67,14 @@ def expected_parameters(self): return [self.lookup_kwarg_as_ofdate, self.lookup_kwarg_as_oftime] def get_form(self, request): - return DateTimeFilterForm(request, data=self.used_parameters, field_name=self.field_path) + return DateTimeFilterForm(request, data=self.used_parameters, + field_name=self.field_path) def queryset(self, request, queryset): fieldname = '%s_as_of' % self.field_path if self.form.is_valid() and fieldname in self.form.cleaned_data: - filter_params = self.form.cleaned_data.get(fieldname, datetime.utcnow()) + filter_params = self.form.cleaned_data.get(fieldname, + datetime.utcnow()) return queryset.as_of(filter_params) else: return queryset @@ -82,7 +87,8 @@ class IsCurrentFilter(admin.SimpleListFilter): def __init__(self, request, params, model, model_admin): self.lookup_kwarg = 'is_current' self.lookup_val = request.GET.get(self.lookup_kwarg, None) - super(IsCurrentFilter, self).__init__(request, params, model, model_admin) + super(IsCurrentFilter, self).__init__(request, params, model, + model_admin) def lookups(self, request, model_admin): return [(None, 'All'), ('1', 'Current'), ] @@ -107,18 +113,21 @@ def queryset(self, request, queryset): class VersionedAdminChecks(ModelAdminChecks): def _check_exclude(self, cls, model=None): """ - Required to suppress error about exclude not being a tuple since we are using @property to dynamically change it + Required to suppress error about exclude not being a tuple since we + are using @property to dynamically change it """ return [] class VersionedAdmin(admin.ModelAdmin): """ - VersionedAdmin 
provides functionality to allow cloning of objects when saving, not cloning if a mistake was - made, and making a current object historical by deleting it + VersionedAdmin provides functionality to allow cloning of objects when + saving, not cloning if a mistake was made, and making a current object + historical by deleting it """ - VERSIONED_EXCLUDE = ['id', 'identity', 'version_end_date', 'version_start_date', 'version_birth_date'] + VERSIONED_EXCLUDE = ['id', 'identity', 'version_end_date', + 'version_start_date', 'version_birth_date'] # These are so that the subclasses can overwrite these attributes # to have the identity, end date, or start date column not show @@ -131,10 +140,12 @@ class VersionedAdmin(admin.ModelAdmin): def get_readonly_fields(self, request, obj=None): """ - This is required a subclass of VersionedAdmin has readonly_fields ours won't be undone + This is required a subclass of VersionedAdmin has readonly_fields + ours won't be undone """ if obj: - return list(self.readonly_fields) + ['id', 'identity', 'is_current'] + return list(self.readonly_fields) + ['id', 'identity', + 'is_current'] return self.readonly_fields def get_ordering(self, request): @@ -146,7 +157,8 @@ def get_list_display(self, request): """ # Force cast to list as super get_list_display could return a tuple - list_display = list(super(VersionedAdmin, self).get_list_display(request)) + list_display = list( + super(VersionedAdmin, self).get_list_display(request)) # Preprend the following fields to list display if self.list_display_show_identity: @@ -165,9 +177,10 @@ def get_list_filter(self, request): Adds versionable custom filtering ability to changelist """ list_filter = super(VersionedAdmin, self).get_list_filter(request) - return list(list_filter) + [('version_start_date', DateTimeFilter), IsCurrentFilter] + return list(list_filter) + [('version_start_date', DateTimeFilter), + IsCurrentFilter] - def restore(self,request, *args, **kwargs): + def restore(self, request, *args, **kwargs): """ View for restoring object from change view """ @@ -175,7 +188,7 @@ def restore(self,request, *args, **kwargs): object_id_index = paths.index("restore") - 1 object_id = paths[object_id_index] - obj = super(VersionedAdmin,self).get_object(request, object_id) + obj = super(VersionedAdmin, self).get_object(request, object_id) obj.restore() admin_wordIndex = object_id_index - 3 path = "/%s" % ("/".join(paths[admin_wordIndex:object_id_index])) @@ -186,19 +199,21 @@ def will_not_clone(self, request, *args, **kwargs): Add save but not clone capability in the changeview """ paths = request.path_info.split('/') - index_of_object_id = paths.index("will_not_clone")-1 + index_of_object_id = paths.index("will_not_clone") - 1 object_id = paths[index_of_object_id] self.change_view(request, object_id) - admin_wordInUrl = index_of_object_id-3 - # This gets the adminsite for the app, and the model name and joins together with / + admin_wordInUrl = index_of_object_id - 3 + # This gets the adminsite for the app, and the model name and joins + # together with / path = '/' + '/'.join(paths[admin_wordInUrl:index_of_object_id]) return HttpResponseRedirect(path) @property def exclude(self): """ - Custom descriptor for exclude since there is no get_exclude method to be overridden + Custom descriptor for exclude since there is no get_exclude method to + be overridden """ exclude = self.VERSIONED_EXCLUDE @@ -210,13 +225,21 @@ def exclude(self): def get_object(self, request, object_id, from_field=None): """ - our implementation of get_object 
allows for cloning when updating an object, not cloning when the button - 'save but not clone' is pushed and at no other time will clone be called + our implementation of get_object allows for cloning when updating an + object, not cloning when the button 'save but not clone' is pushed + and at no other time will clone be called """ - obj = super(VersionedAdmin, self).get_object(request, object_id) # from_field breaks in 1.7.8 - # Only clone if update view as get_object() is also called for change, delete, and history views - if request.method == 'POST' and obj and obj.is_latest and 'will_not_clone' not in request.path \ - and 'delete' not in request.path and 'restore' not in request.path: + # from_field breaks in 1.7.8 + obj = super(VersionedAdmin, self).get_object(request, + object_id) + # Only clone if update view as get_object() is also called for change, + # delete, and history views + if request.method == 'POST' and \ + obj and \ + obj.is_latest and \ + 'will_not_clone' not in request.path and \ + 'delete' not in request.path and \ + 'restore' not in request.path: obj = obj.clone() return obj @@ -226,7 +249,8 @@ def history_view(self, request, object_id, extra_context=None): from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model - obj = get_object_or_404(self.get_queryset(request), pk=unquote(object_id)) + obj = get_object_or_404(self.get_queryset(request), + pk=unquote(object_id)) if not self.has_change_permission(request, obj): raise PermissionDenied @@ -234,7 +258,8 @@ def history_view(self, request, object_id, extra_context=None): opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( - object_id=unquote(obj.identity), # this is the change for our override; + object_id=unquote(obj.identity), + # this is the change for our override; content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') @@ -243,7 +268,8 @@ def history_view(self, request, object_id, extra_context=None): context = dict(ctx, title=('Change history: %s') % force_text(obj), action_list=action_list, - module_name=capfirst(force_text(opts.verbose_name_plural)), + module_name=capfirst( + force_text(opts.verbose_name_plural)), object=obj, opts=opts, preserved_filters=self.get_preserved_filters(request), @@ -259,9 +285,12 @@ def get_urls(self): """ Appends the custom will_not_clone url to the admin site """ - not_clone_url = [url(r'^(.+)/will_not_clone/$', admin.site.admin_view(self.will_not_clone))] - restore_url = [url(r'^(.+)/restore/$', admin.site.admin_view(self.restore))] - return not_clone_url + restore_url + super(VersionedAdmin, self).get_urls() + not_clone_url = [url(r'^(.+)/will_not_clone/$', + admin.site.admin_view(self.will_not_clone))] + restore_url = [ + url(r'^(.+)/restore/$', admin.site.admin_view(self.restore))] + return not_clone_url + restore_url + super(VersionedAdmin, + self).get_urls() def is_current(self, obj): return obj.is_current @@ -279,5 +308,6 @@ def identity_shortener(self, obj): identity_shortener.short_description = "Short Identity" class Media(): - # This supports dynamically adding 'Save without cloning' button: http://bit.ly/1T2fGOP + # This supports dynamically adding 'Save without cloning' button: + # http://bit.ly/1T2fGOP js = ('js/admin_addon.js',) diff --git a/versions/deletion.py b/versions/deletion.py index 819f1d6..dd2cc8d 100644 --- a/versions/deletion.py +++ b/versions/deletion.py @@ -16,13 +16,15 @@ class VersionedCollector(Collector): Since 
non-versionable and versionable objects can be related, the delete() method handles both of them. The standard Django behaviour is kept for - non-versionable objects. For versionable objects, no pre/post-delete signals - are sent. No signal is sent because the object is not being removed from the - database. If you want the standard signals to be sent, or custom signals, - create a subclass of this class and override versionable_pre_delete() and/or - versionable_post_delete(), and in your settings file specify the dotted path - to your custom class as a string, e.g.: - VERSIONED_DELETE_COLLECTOR_CLASS = 'myapp.deletion.CustomVersionedCollector' + non-versionable objects. For versionable objects, no pre/post-delete + signals are sent. No signal is sent because the object is not being + removed from the database. If you want the standard signals to be sent, + or custom signals, create a subclass of this class and override + versionable_pre_delete() and/or versionable_post_delete(), and in your + settings file specify the dotted path to your custom class as a + string, e.g.: + VERSIONED_DELETE_COLLECTOR_CLASS = + 'myapp.deletion.CustomVersionedCollector' """ def can_fast_delete(self, objs, from_field=None): @@ -30,7 +32,8 @@ def can_fast_delete(self, objs, from_field=None): return False def is_versionable(self, model): - return hasattr(model, 'VERSION_IDENTIFIER_FIELD') and hasattr(model, 'OBJECT_IDENTIFIER_FIELD') + return hasattr(model, 'VERSION_IDENTIFIER_FIELD') and \ + hasattr(model, 'OBJECT_IDENTIFIER_FIELD') def delete(self, timestamp): # sort instance collections @@ -38,8 +41,8 @@ def delete(self, timestamp): self.data[model] = sorted(instances, key=attrgetter("pk")) # if possible, bring the models in an order suitable for databases that - # don't support transactions or cannot defer constraint checks until the - # end of a transaction. + # don't support transactions or cannot defer constraint checks until + # the end of a transaction. self.sort() with transaction.atomic(using=self.using, savepoint=False): @@ -47,7 +50,8 @@ def delete(self, timestamp): for model, obj in self.instances_with_model(): if not model._meta.auto_created: if self.is_versionable(model): - # By default, no signal is sent when deleting a Versionable. + # By default, no signal is sent when deleting a + # Versionable. self.versionable_pre_delete(obj, timestamp) else: signals.pre_delete.send( @@ -56,20 +60,26 @@ def delete(self, timestamp): # do not do fast deletes if self.fast_deletes: - raise RuntimeError("No fast_deletes should be present; they are not safe for Versionables") + raise RuntimeError("No fast_deletes should be present; " + "they are not safe for Versionables") # update fields - for model, instances_for_fieldvalues in six.iteritems(self.field_updates): + for model, instances_for_fieldvalues in six.iteritems( + self.field_updates): id_map = {} - for (field, value), instances in six.iteritems(instances_for_fieldvalues): + for (field, value), instances in six.iteritems( + instances_for_fieldvalues): if self.is_versionable(model): - # Do not set the foreign key to null, which can be the behaviour (depending on DB backend) - # for the default CASCADE on_delete method. - # In the case of a SET.. method, clone before changing the value (if it hasn't already been - # cloned) + # Do not set the foreign key to null, which can be the + # behaviour (depending on DB backend) for the default + # CASCADE on_delete method. + # In the case of a SET.. 
method, clone before + # changing the value (if it hasn't already been cloned) updated_instances = set() - if not ( - isinstance(field, versions.fields.VersionedForeignKey) and field.rel.on_delete == CASCADE): + if not (isinstance( + field, + versions.fields.VersionedForeignKey) and + field.rel.on_delete == CASCADE): for instance in instances: # Clone before updating cloned = id_map.get(instance.pk, None) @@ -77,8 +87,10 @@ def delete(self, timestamp): cloned = instance.clone() id_map[instance.pk] = cloned updated_instances.add(cloned) - # TODO: instance should get updated with new values from clone ? - instances_for_fieldvalues[(field, value)] = updated_instances + # TODO: instance should get updated with new + # values from clone ? + instances_for_fieldvalues[ + (field, value)] = updated_instances # Replace the instances with their clones in self.data, too model_instances = self.data.get(model, {}) @@ -88,9 +100,11 @@ def delete(self, timestamp): self.data[model][index] = cloned query = sql.UpdateQuery(model) - for (field, value), instances in six.iteritems(instances_for_fieldvalues): + for (field, value), instances in six.iteritems( + instances_for_fieldvalues): if instances: - query.update_batch([obj.pk for obj in instances], {field.name: value}, self.using) + query.update_batch([obj.pk for obj in instances], + {field.name: value}, self.using) # reverse instance collections for instances in six.itervalues(self.data): @@ -102,7 +116,8 @@ def delete(self, timestamp): for instance in instances: self.versionable_delete(instance, timestamp) if not model._meta.auto_created: - # By default, no signal is sent when deleting a Versionable. + # By default, no signal is sent when deleting a + # Versionable. self.versionable_post_delete(instance, timestamp) else: query = sql.DeleteQuery(model) @@ -116,12 +131,15 @@ def delete(self, timestamp): ) # update collected instances - for model, instances_for_fieldvalues in six.iteritems(self.field_updates): - for (field, value), instances in six.iteritems(instances_for_fieldvalues): + for model, instances_for_fieldvalues in six.iteritems( + self.field_updates): + for (field, value), instances in six.iteritems( + instances_for_fieldvalues): for obj in instances: setattr(obj, field.attname, value) - # Do not set Versionable object ids to None, since they still do have an id. + # Do not set Versionable object ids to None, since they still do have + # an id. # Instead, set their version_end_date. for model, instances in six.iteritems(self.data): is_versionable = self.is_versionable(model) @@ -133,8 +151,8 @@ def delete(self, timestamp): def related_objects(self, related, objs): """ - Gets a QuerySet of current objects related to ``objs`` via the relation ``related``. - + Gets a QuerySet of current objects related to ``objs`` via the + relation ``related``. """ from versions.models import Versionable @@ -148,7 +166,8 @@ def related_objects(self, related, objs): def versionable_pre_delete(self, instance, timestamp): """ - Override this method to implement custom behaviour. By default, does nothing. + Override this method to implement custom behaviour. By default, + does nothing. :param Versionable instance: :param datetime timestamp: @@ -157,7 +176,8 @@ def versionable_pre_delete(self, instance, timestamp): def versionable_post_delete(self, instance, timestamp): """ - Override this method to implement custom behaviour. By default, does nothing. + Override this method to implement custom behaviour. By default, + does nothing. 
:param Versionable instance: :param datetime timestamp: diff --git a/versions/descriptors.py b/versions/descriptors.py index 88ceaa6..f40c77f 100644 --- a/versions/descriptors.py +++ b/versions/descriptors.py @@ -3,9 +3,11 @@ from django.core.exceptions import SuspiciousOperation, FieldDoesNotExist from django.db import router, transaction from django.db.models.base import Model -from django.db.models.fields.related import (ForwardManyToOneDescriptor, ReverseManyToOneDescriptor, +from django.db.models.fields.related import (ForwardManyToOneDescriptor, + ReverseManyToOneDescriptor, ManyToManyDescriptor) -from django.db.models.fields.related_descriptors import create_forward_many_to_many_manager +from django.db.models.fields.related_descriptors import \ + create_forward_many_to_many_manager from django.db.models.query_utils import Q from django.utils.functional import cached_property @@ -25,13 +27,16 @@ def matches_querytime(instance, querytime): if not querytime.time: return instance.version_end_date is None - return (instance.version_start_date <= querytime.time - and (instance.version_end_date is None or instance.version_end_date > querytime.time)) + return (instance.version_start_date <= querytime.time and ( + instance.version_end_date is None or + instance.version_end_date > querytime.time)) class VersionedForwardManyToOneDescriptor(ForwardManyToOneDescriptor): """ - The VersionedForwardManyToOneDescriptor is used when pointing another Model using a VersionedForeignKey; + The VersionedForwardManyToOneDescriptor is used when pointing another + Model using a VersionedForeignKey; + For example: class Team(Versionable): @@ -52,18 +57,23 @@ def get_prefetch_queryset(self, instances, queryset=None): queryset = self.get_queryset() queryset._add_hints(instance=instances[0]) - # CleanerVersion change 1: force the querytime to be the same as the prefetched-for instance. - # This is necessary to have reliable results and avoid extra queries for cache misses when - # accessing the child objects from their parents (e.g. choice.poll). + # CleanerVersion change 1: force the querytime to be the same as the + # prefetched-for instance. + # This is necessary to have reliable results and avoid extra queries + # for cache misses when accessing the child objects from their + # parents (e.g. choice.poll). instance_querytime = instances[0]._querytime if instance_querytime.active: - if queryset.querytime.active and queryset.querytime.time != instance_querytime.time: - raise ValueError("A Prefetch queryset that specifies an as_of time must match " - "the as_of of the base queryset.") + if queryset.querytime.active and \ + queryset.querytime.time != instance_querytime.time: + raise ValueError( + "A Prefetch queryset that specifies an as_of time must " + "match the as_of of the base queryset.") else: queryset.querytime = instance_querytime - # CleanerVersion change 2: make rel_obj_attr return a tuple with the object's identity. + # CleanerVersion change 2: make rel_obj_attr return a tuple with + # the object's identity. # rel_obj_attr = self.field.get_foreign_related_value def versioned_fk_rel_obj_attr(versioned_rel_obj): return versioned_rel_obj.identity, @@ -71,9 +81,11 @@ def versioned_fk_rel_obj_attr(versioned_rel_obj): rel_obj_attr = versioned_fk_rel_obj_attr instance_attr = self.field.get_local_related_value instances_dict = {instance_attr(inst): inst for inst in instances} - # CleanerVersion change 3: fake the related field so that it provides a name of 'identity'. 
+ # CleanerVersion change 3: fake the related field so that it provides + # a name of 'identity'. # related_field = self.field.foreign_related_fields[0] - related_field = namedtuple('VersionedRelatedFieldTuple', 'name')('identity') + related_field = namedtuple('VersionedRelatedFieldTuple', 'name')( + 'identity') # FIXME: This will need to be revisited when we introduce support for # composite fields. In the meantime we take this practical approach to @@ -81,9 +93,10 @@ def versioned_fk_rel_obj_attr(versioned_rel_obj): # (related_name ends with a '+'). Refs #21410. # The check for len(...) == 1 is a special case that allows the query # to be join-less and smaller. Refs #21760. - if self.field.rel.is_hidden() or len(self.field.foreign_related_fields) == 1: - query = {'%s__in' % related_field.name: set(instance_attr(inst)[0] for inst in instances)} - # query = {'identity__in': set(instance_attr(inst)[0] for inst in instances)} + if self.field.rel.is_hidden() or len( + self.field.foreign_related_fields) == 1: + query = {'%s__in' % related_field.name: set( + instance_attr(inst)[0] for inst in instances)} else: query = {'%s__in' % self.field.related_query_name(): instances} queryset = queryset.filter(**query) @@ -98,12 +111,14 @@ def versioned_fk_rel_obj_attr(versioned_rel_obj): return queryset, rel_obj_attr, instance_attr, True, self.cache_name def get_queryset(self, **hints): - queryset = super(VersionedForwardManyToOneDescriptor, self).get_queryset(**hints) + queryset = super(VersionedForwardManyToOneDescriptor, + self).get_queryset(**hints) if hasattr(queryset, 'querytime'): if 'instance' in hints: instance = hints['instance'] if hasattr(instance, '_querytime'): - if instance._querytime.active and instance._querytime != queryset.querytime: + if instance._querytime.active and \ + instance._querytime != queryset.querytime: queryset = queryset.as_of(instance._querytime.time) else: queryset = queryset.as_of(None) @@ -115,8 +130,10 @@ def get_queryset(self, **hints): def vforward_many_to_one_descriptor_getter(self, instance, instance_type=None): """ - The getter method returns the object, which points instance, e.g. choice.poll returns - a Poll instance, whereas the Poll class defines the ForeignKey. + The getter method returns the object, which points instance, + e.g. choice.poll returns a Poll instance, whereas the Poll class defines + the ForeignKey. + :param instance: The object on which the property was accessed :param instance_type: The type of the instance object :return: Returns a Versionable @@ -131,28 +148,33 @@ def vforward_many_to_one_descriptor_getter(self, instance, instance_type=None): return None if not isinstance(current_elt, Versionable): - raise TypeError("VersionedForeignKey target is of type " - + str(type(current_elt)) - + ", which is not a subclass of Versionable") + raise TypeError("VersionedForeignKey target is of type " + + str(type(current_elt)) + + ", which is not a subclass of Versionable") if hasattr(instance, '_querytime'): - # If current_elt matches the instance's querytime, there's no need to make a database query. + # If current_elt matches the instance's querytime, there's no need to + # make a database query. 
if matches_querytime(current_elt, instance._querytime): current_elt._querytime = instance._querytime return current_elt - return current_elt.__class__.objects.as_of(instance._querytime.time).get(identity=current_elt.identity) + return current_elt.__class__.objects.as_of( + instance._querytime.time).get(identity=current_elt.identity) else: - return current_elt.__class__.objects.current.get(identity=current_elt.identity) + return current_elt.__class__.objects.current.get( + identity=current_elt.identity) -vforward_many_to_one_descriptor_class.__get__ = vforward_many_to_one_descriptor_getter +vforward_many_to_one_descriptor_class.__get__ = \ + vforward_many_to_one_descriptor_getter class VersionedReverseManyToOneDescriptor(ReverseManyToOneDescriptor): @cached_property def related_manager_cls(self): - manager_cls = super(VersionedReverseManyToOneDescriptor, self).related_manager_cls + manager_cls = super(VersionedReverseManyToOneDescriptor, + self).related_manager_cls rel_field = self.field class VersionedRelatedManager(manager_cls): @@ -168,8 +190,9 @@ def get_queryset(self): from versions.models import VersionedQuerySet queryset = super(VersionedRelatedManager, self).get_queryset() - # Do not set the query time if it is already correctly set. queryset.as_of() returns a clone - # of the queryset, and this will destroy the prefetched objects cache if it exists. + # Do not set the query time if it is already correctly set. + # queryset.as_of() returns a clone of the queryset, and this + # will destroy the prefetched objects cache if it exists. if isinstance(queryset, VersionedQuerySet) \ and self.instance._querytime.active \ and queryset.querytime != self.instance._querytime: @@ -178,15 +201,19 @@ def get_queryset(self): def get_prefetch_queryset(self, instances, queryset=None): """ - Overrides RelatedManager's implementation of get_prefetch_queryset so that it works - nicely with VersionedQuerySets. It ensures that identities and time-limited where - clauses are used when selecting related reverse foreign key objects. + Overrides RelatedManager's implementation of + get_prefetch_queryset so that it works nicely with + VersionedQuerySets. It ensures that identities and time-limited + where clauses are used when selecting related reverse foreign + key objects. """ if queryset is None: - # Note that this intentionally call's VersionManager's get_queryset, instead of simply calling - # the superclasses' get_queryset (as the non-versioned RelatedManager does), because what is - # needed is a simple Versioned queryset without any restrictions (e.g. do not - # apply self.core_filters). + # Note that this intentionally call's VersionManager's + # get_queryset, instead of simply calling the superclasses' + # get_queryset (as the non-versioned RelatedManager does), + # because what is needed is a simple Versioned queryset + # without any restrictions (e.g. do not apply + # self.core_filters). 
from versions.models import VersionManager queryset = VersionManager.get_queryset(self) @@ -194,22 +221,26 @@ def get_prefetch_queryset(self, instances, queryset=None): queryset = queryset.using(queryset._db or self._db) instance_querytime = instances[0]._querytime if instance_querytime.active: - if queryset.querytime.active and queryset.querytime.time != instance_querytime.time: - raise ValueError("A Prefetch queryset that specifies an as_of time must match " - "the as_of of the base queryset.") + if queryset.querytime.active and \ + queryset.querytime.time != \ + instance_querytime.time: + raise ValueError( + "A Prefetch queryset that specifies an as_of time " + "must match the as_of of the base queryset.") else: queryset.querytime = instance_querytime rel_obj_attr = rel_field.get_local_related_value instance_attr = rel_field.get_foreign_related_value - # Use identities instead of ids so that this will work with versioned objects. + # Use identities instead of ids so that this will work with + # versioned objects. instances_dict = {(inst.identity,): inst for inst in instances} identities = [inst.identity for inst in instances] query = {'%s__identity__in' % rel_field.name: identities} queryset = queryset.filter(**query) - # Since we just bypassed this class' get_queryset(), we must manage - # the reverse relation manually. + # Since we just bypassed this class' get_queryset(), we must + # manage the reverse relation manually. for rel_obj in queryset: instance = instances_dict[rel_obj_attr(rel_obj)] setattr(rel_obj, rel_field.name, instance) @@ -221,19 +252,24 @@ def add(self, *objs, **kwargs): cloned_objs = () for obj in objs: if not isinstance(obj, Versionable): - raise TypeError("Trying to add a non-Versionable to a VersionedForeignKey relationship") + raise TypeError( + "Trying to add a non-Versionable to a " + "VersionedForeignKey relationship") cloned_objs += (obj.clone(),) - super(VersionedRelatedManager, self).add(*cloned_objs, **kwargs) + super(VersionedRelatedManager, self).add(*cloned_objs, + **kwargs) # clear() and remove() are present if the FK is nullable if 'clear' in dir(manager_cls): def clear(self, **kwargs): """ - Overridden to ensure that the current queryset is used, and to clone objects before they - are removed, so that history is not lost. + Overridden to ensure that the current queryset is used, + and to clone objects before they are removed, so that + history is not lost. """ bulk = kwargs.pop('bulk', True) - db = router.db_for_write(self.model, instance=self.instance) + db = router.db_for_write(self.model, + instance=self.instance) queryset = self.current.using(db) with transaction.atomic(using=db, savepoint=False): cloned_pks = [obj.clone().pk for obj in queryset] @@ -247,15 +283,18 @@ def remove(self, *objs, **kwargs): val = rel_field.get_foreign_related_value(self.instance) cloned_objs = () for obj in objs: - # Is obj actually part of this descriptor set? Otherwise, silently go over it, since Django + # Is obj actually part of this descriptor set? 
+ # Otherwise, silently go over it, since Django # handles that case if rel_field.get_local_related_value(obj) == val: # Silently pass over non-versionable items if not isinstance(obj, Versionable): raise TypeError( - "Trying to remove a non-Versionable from a VersionedForeignKey realtionship") + "Trying to remove a non-Versionable from " + "a VersionedForeignKey realtionship") cloned_objs += (obj.clone(),) - super(VersionedRelatedManager, self).remove(*cloned_objs, **kwargs) + super(VersionedRelatedManager, self).remove(*cloned_objs, + **kwargs) return VersionedRelatedManager @@ -272,15 +311,20 @@ def related_manager_cls(self): def __set__(self, instance, value): """ - Completely overridden to avoid bulk deletion that happens when the parent method calls clear(). + Completely overridden to avoid bulk deletion that happens when the + parent method calls clear(). - The parent method's logic is basically: clear all in bulk, then add the given objects in bulk. - Instead, we figure out which ones are being added and removed, and call add and remove for these values. + The parent method's logic is basically: clear all in bulk, then add + the given objects in bulk. + Instead, we figure out which ones are being added and removed, and + call add and remove for these values. This lets us retain the versioning information. - Since this is a many-to-many relationship, it is assumed here that the django.db.models.deletion.Collector - logic, that is used in clear(), is not necessary here. Collector collects related models, e.g. ones that should - also be deleted because they have a ON CASCADE DELETE relationship to the object, or, in the case of + Since this is a many-to-many relationship, it is assumed here that + the django.db.models.deletion.Collector logic, that is used in + clear(), is not necessary here. Collector collects related models, + e.g. ones that should also be deleted because they have + a ON CASCADE DELETE relationship to the object, or, in the case of "Multi-table inheritance", are parent objects. :param instance: The instance on which the getter was called @@ -289,17 +333,22 @@ def __set__(self, instance, value): if not instance.is_current: raise SuspiciousOperation( - "Related values can only be directly set on the current version of an object") + "Related values can only be directly set on the current " + "version of an object") if not self.field.rel.through._meta.auto_created: opts = self.field.rel.through._meta - raise AttributeError(("Cannot set values on a ManyToManyField which specifies an intermediary model. " - "Use %s.%s's Manager instead.") % (opts.app_label, opts.object_name)) + raise AttributeError(( + "Cannot set values on a ManyToManyField " + "which specifies an intermediary model. " + "Use %s.%s's Manager instead.") % ( + opts.app_label, opts.object_name)) manager = self.__get__(instance) - # Below comment is from parent __set__ method. We'll force evaluation, too: - # clear() can change expected output of 'value' queryset, we force evaluation - # of queryset before clear; ticket #19816 + # Below comment is from parent __set__ method. 
We'll force + # evaluation, too: + # clear() can change expected output of 'value' queryset, we force + # evaluation of queryset before clear; ticket #19816 value = tuple(value) being_removed, being_added = self.get_current_m2m_diff(instance, value) @@ -310,7 +359,8 @@ def __set__(self, instance, value): def get_current_m2m_diff(self, instance, new_objects): """ :param instance: Versionable object - :param new_objects: objects which are about to be associated with instance + :param new_objects: objects which are about to be associated with + instance :return: (being_removed id list, being_added id list) :rtype : tuple """ @@ -334,22 +384,30 @@ def get_current_m2m_diff(self, instance, new_objects): def pks_from_objects(self, objects): """ - Extract all the primary key strings from the given objects. Objects may be Versionables, or bare primary keys. + Extract all the primary key strings from the given objects. + Objects may be Versionables, or bare primary keys. + :rtype : set """ return {o.pk if isinstance(o, Model) else o for o in objects} -def create_versioned_forward_many_to_many_manager(superclass, rel, reverse=None): - many_related_manager_klass = create_forward_many_to_many_manager(superclass, rel, reverse) +def create_versioned_forward_many_to_many_manager(superclass, rel, + reverse=None): + many_related_manager_klass = create_forward_many_to_many_manager( + superclass, rel, reverse) class VersionedManyRelatedManager(many_related_manager_klass): def __init__(self, *args, **kwargs): super(VersionedManyRelatedManager, self).__init__(*args, **kwargs) - # Additional core filters are: version_start_date <= t & (version_end_date > t | version_end_date IS NULL) - # but we cannot work with the Django core filters, since they don't support ORing filters, which - # is a thing we need to consider the "version_end_date IS NULL" case; - # So, we define our own set of core filters being applied when versioning + # Additional core filters are: + # version_start_date <= t & + # (version_end_date > t | version_end_date IS NULL) + # but we cannot work with the Django core filters, since they + # don't support ORing filters, which is a thing we need to + # consider the "version_end_date IS NULL" case; + # So, we define our own set of core filters being applied when + # versioning try: _ = self.through._meta.get_field('version_start_date') _ = self.through._meta.get_field('version_end_date') @@ -361,27 +419,31 @@ def __init__(self, *args, **kwargs): def get_queryset(self): """ - Add a filter to the queryset, limiting the results to be pointed by relationship that are - valid for the given timestamp (which is taken at the current instance, or set to now, if not - available). - Long story short, apply the temporal validity filter also to the intermediary model. + Add a filter to the queryset, limiting the results to be pointed + by relationship that are valid for the given timestamp (which is + taken at the current instance, or set to now, if not available). + Long story short, apply the temporal validity filter also to the + intermediary model. 
""" queryset = super(VersionedManyRelatedManager, self).get_queryset() if hasattr(queryset, 'querytime'): - if self.instance._querytime.active and self.instance._querytime != queryset.querytime: + if self.instance._querytime.active and \ + self.instance._querytime != queryset.querytime: queryset = queryset.as_of(self.instance._querytime.time) return queryset def _remove_items(self, source_field_name, target_field_name, *objs): """ - Instead of removing items, we simply set the version_end_date of the current item to the - current timestamp --> t[now]. - Like that, there is no more current entry having that identity - which is equal to - not existing for timestamps greater than t[now]. + Instead of removing items, we simply set the version_end_date of + the current item to the current timestamp --> t[now]. + Like that, there is no more current entry having that identity - + which is equal to not existing for timestamps greater than t[now]. """ - return self._remove_items_at(None, source_field_name, target_field_name, *objs) + return self._remove_items_at(None, source_field_name, + target_field_name, *objs) - def _remove_items_at(self, timestamp, source_field_name, target_field_name, *objs): + def _remove_items_at(self, timestamp, source_field_name, + target_field_name, *objs): if objs: if timestamp is None: timestamp = get_utc_now() @@ -390,10 +452,14 @@ def _remove_items_at(self, timestamp, source_field_name, target_field_name, *obj if isinstance(obj, self.model): # The Django 1.7-way is preferred if hasattr(self, 'target_field'): - fk_val = self.target_field.get_foreign_related_value(obj)[0] + fk_val = \ + self.target_field \ + .get_foreign_related_value(obj)[0] else: - raise TypeError("We couldn't find the value of the foreign key, this might be due to the " - "use of an unsupported version of Django") + raise TypeError( + "We couldn't find the value of the foreign " + "key, this might be due to the use of an " + "unsupported version of Django") old_ids.add(fk_val) else: old_ids.add(obj) @@ -409,13 +475,16 @@ def _remove_items_at(self, timestamp, source_field_name, target_field_name, *obj def add(self, *objs): if not self.instance.is_current: raise SuspiciousOperation( - "Adding many-to-many related objects is only possible on the current version") - - # The ManyRelatedManager.add() method uses the through model's default manager to get - # a queryset when looking at which objects already exist in the database. - # In order to restrict the query to the current versions when that is done, - # we temporarily replace the queryset's using method so that the version validity - # condition can be specified. + "Adding many-to-many related objects is only possible " + "on the current version") + + # The ManyRelatedManager.add() method uses the through model's + # default manager to get a queryset when looking at which + # objects already exist in the database. + # In order to restrict the query to the current versions when + # that is done, we temporarily replace the queryset's using + # method so that the version validity condition can be + # specified. 
klass = self.through._default_manager.get_queryset().__class__ __using_backup = klass.using @@ -429,7 +498,8 @@ def using_replacement(self, *args, **kwargs): def add_at(self, timestamp, *objs): """ - This function adds an object at a certain point in time (timestamp) + This function adds an object at a certain point in time + (timestamp) """ # First off, define the new constructor @@ -438,15 +508,18 @@ def _through_init(self, *args, **kwargs): self.version_birth_date = timestamp self.version_start_date = timestamp - # Through-classes have an empty constructor, so it can easily be overwritten when needed; - # This is not the default case, so the overwrite only takes place when we "modify the past" + # Through-classes have an empty constructor, so it can easily + # be overwritten when needed; + # This is not the default case, so the overwrite only takes + # place when we "modify the past" self.through.__init_backup__ = self.through.__init__ self.through.__init__ = _through_init # Do the add operation self.add(*objs) - # Remove the constructor again (by replacing it with the original empty constructor) + # Remove the constructor again (by replacing it with the + # original empty constructor) self.through.__init__ = self.through.__init_backup__ del self.through.__init_backup__ @@ -455,14 +528,18 @@ def _through_init(self, *args, **kwargs): if 'remove' in dir(many_related_manager_klass): def remove_at(self, timestamp, *objs): """ - Performs the act of removing specified relationships at a specified time (timestamp); - So, not the objects at a given time are removed, but their relationship! + Performs the act of removing specified relationships at a + specified time (timestamp); + So, not the objects at a given time are removed, but their + relationship! """ - self._remove_items_at(timestamp, self.source_field_name, self.target_field_name, *objs) + self._remove_items_at(timestamp, self.source_field_name, + self.target_field_name, *objs) # For consistency, also handle the symmetrical case if self.symmetrical: - self._remove_items_at(timestamp, self.target_field_name, self.source_field_name, *objs) + self._remove_items_at(timestamp, self.target_field_name, + self.source_field_name, *objs) remove_at.alters_data = True diff --git a/versions/fields.py b/versions/fields.py index 57d5e89..f780531 100644 --- a/versions/fields.py +++ b/versions/fields.py @@ -14,60 +14,72 @@ class VersionedForeignKey(ForeignKey): """ - We need to replace the standard ForeignKey declaration in order to be able to introduce - the VersionedReverseSingleRelatedObjectDescriptor, which allows to go back in time... - We also want to allow keeping track of any as_of time so that joins can be restricted - based on that. + We need to replace the standard ForeignKey declaration in order to be able + to introduce the VersionedReverseSingleRelatedObjectDescriptor, which + allows to go back in time... + We also want to allow keeping track of any as_of time so that joins can + be restricted based on that. 
""" def __init__(self, *args, **kwargs): super(VersionedForeignKey, self).__init__(*args, **kwargs) def contribute_to_class(self, cls, name, virtual_only=False): - super(VersionedForeignKey, self).contribute_to_class(cls, name, virtual_only) + super(VersionedForeignKey, self).contribute_to_class(cls, name, + virtual_only) setattr(cls, self.name, VersionedForwardManyToOneDescriptor(self)) def contribute_to_related_class(self, cls, related): """ - Override ForeignKey's methods, and replace the descriptor, if set by the parent's methods + Override ForeignKey's methods, and replace the descriptor, if set by + the parent's methods """ # Internal FK's - i.e., those with a related name ending with '+' - # and swapped models don't get a related descriptor. - super(VersionedForeignKey, self).contribute_to_related_class(cls, related) + super(VersionedForeignKey, self).contribute_to_related_class(cls, + related) accessor_name = related.get_accessor_name() if hasattr(cls, accessor_name): - setattr(cls, accessor_name, VersionedReverseManyToOneDescriptor(related)) + setattr(cls, accessor_name, + VersionedReverseManyToOneDescriptor(related)) def get_extra_restriction(self, where_class, alias, remote_alias): """ - Overrides ForeignObject's get_extra_restriction function that returns an SQL statement which is appended to a - JOIN's conditional filtering part + Overrides ForeignObject's get_extra_restriction function that returns + an SQL statement which is appended to a JOIN's conditional filtering + part :return: SQL conditional statement :rtype: WhereNode """ historic_sql = '''{alias}.version_start_date <= %s - AND ({alias}.version_end_date > %s OR {alias}.version_end_date is NULL )''' + AND ({alias}.version_end_date > %s + OR {alias}.version_end_date is NULL )''' current_sql = '''{alias}.version_end_date is NULL''' # How 'bout creating an ExtraWhere here, without params - return where_class([VersionedExtraWhere(historic_sql=historic_sql, current_sql=current_sql, alias=alias, + return where_class([VersionedExtraWhere(historic_sql=historic_sql, + current_sql=current_sql, + alias=alias, remote_alias=remote_alias)]) def get_joining_columns(self, reverse_join=False): """ Get and return joining columns defined by this foreign key relationship - :return: A tuple containing the column names of the tables to be joined (, ) + :return: A tuple containing the column names of the tables to be + joined (, ) :rtype: tuple """ - source = self.reverse_related_fields if reverse_join else self.related_fields + source = self.reverse_related_fields if reverse_join \ + else self.related_fields joining_columns = tuple() for lhs_field, rhs_field in source: lhs_col_name = lhs_field.column rhs_col_name = rhs_field.column # Test whether # - self is the current ForeignKey relationship - # - self was not auto_created (e.g. is not part of a M2M relationship) + # - self was not auto_created (e.g. 
is not part of a M2M + # relationship) if self is lhs_field and not self.auto_created: if rhs_col_name == Versionable.VERSION_IDENTIFIER_FIELD: rhs_col_name = Versionable.OBJECT_IDENTIFIER_FIELD @@ -80,10 +92,15 @@ def get_joining_columns(self, reverse_join=False): def get_reverse_related_filter(self, obj): base_filter = dict() for lh_field, rh_field in self.related_fields: - if isinstance(obj, Versionable) and rh_field.attname == Versionable.VERSION_IDENTIFIER_FIELD: - base_filter.update(**{Versionable.OBJECT_IDENTIFIER_FIELD: getattr(obj, lh_field.attname)}) + if isinstance(obj, Versionable) and \ + rh_field.attname == \ + Versionable.VERSION_IDENTIFIER_FIELD: + base_filter.update(**{ + Versionable.OBJECT_IDENTIFIER_FIELD: + getattr(obj, lh_field.attname)}) else: - base_filter.update(**{rh_field.attname: getattr(obj, lh_field.attname)}) + base_filter.update( + **{rh_field.attname: getattr(obj, lh_field.attname)}) descriptor_filter = self.get_extra_descriptor_filter(obj) base_q = Q(**base_filter) if isinstance(descriptor_filter, dict): @@ -91,7 +108,6 @@ def get_reverse_related_filter(self, obj): elif descriptor_filter: return base_q & descriptor_filter return base_q - # return super(VersionedForeignKey, self).get_reverse_related_filter(obj) class VersionedManyToManyField(ManyToManyField): @@ -100,56 +116,75 @@ def __init__(self, *args, **kwargs): def contribute_to_class(self, cls, name, **kwargs): """ - Called at class type creation. So, this method is called, when metaclasses get created + Called at class type creation. So, this method is called, when + metaclasses get created """ - # TODO: Apply 3 edge cases when not to create an intermediary model specified in django.db.models.fields.related:1566 - # self.rel.through needs to be set prior to calling super, since super(...).contribute_to_class refers to it. - # Classes pointed to by a string do not need to be resolved here, since Django does that at a later point in - # time - which is nice... ;) + # TODO: Apply 3 edge cases when not to create an intermediary model + # specified in django.db.models.fields.related:1566 + # self.rel.through needs to be set prior to calling super, since + # super(...).contribute_to_class refers to it. + # Classes pointed to by a string do not need to be resolved here, + # since Django does that at a later point in time - which is nice... 
;) # # Superclasses take care of: # - creating the through class if unset # - resolving the through class if it's a string # - resolving string references within the through class - if not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped: - # We need to anticipate some stuff, that's done only later in class contribution + if not self.remote_field.through and \ + not cls._meta.abstract and \ + not cls._meta.swapped: + # We need to anticipate some stuff, that's done only later in + # class contribution self.set_attributes_from_name(name) self.model = cls - self.remote_field.through = VersionedManyToManyField.create_versioned_many_to_many_intermediary_model(self, - cls, - name) + self.remote_field.through = \ + VersionedManyToManyField \ + .create_versioned_many_to_many_intermediary_model(self, + cls, + name) super(VersionedManyToManyField, self).contribute_to_class(cls, name) # Overwrite the descriptor if hasattr(cls, self.name): - setattr(cls, self.name, VersionedManyToManyDescriptor(self.remote_field)) + setattr(cls, self.name, + VersionedManyToManyDescriptor(self.remote_field)) def contribute_to_related_class(self, cls, related): """ - Called at class type creation. So, this method is called, when metaclasses get created + Called at class type creation. So, this method is called, when + metaclasses get created """ - super(VersionedManyToManyField, self).contribute_to_related_class(cls, related) + super(VersionedManyToManyField, self). \ + contribute_to_related_class(cls, related) accessor_name = related.get_accessor_name() if accessor_name and hasattr(cls, accessor_name): descriptor = VersionedManyToManyDescriptor(related, accessor_name) setattr(cls, accessor_name, descriptor) - if hasattr(cls._meta, 'many_to_many_related') and isinstance(cls._meta.many_to_many_related, list): + if hasattr(cls._meta, 'many_to_many_related') and isinstance( + cls._meta.many_to_many_related, list): cls._meta.many_to_many_related.append(descriptor) else: cls._meta.many_to_many_related = [descriptor] @staticmethod - def create_versioned_many_to_many_intermediary_model(field, cls, field_name): - # TODO: Verify functionality against django.db.models.fields.related:1048 - # Let's not care too much on what flags could potentially be set on that intermediary class (e.g. managed, etc) - # Let's play the game, as if the programmer had specified a class within his models... Here's how. - - # FIXME: VersionedManyToManyModels do not get registered in the apps models. + def create_versioned_many_to_many_intermediary_model(field, cls, + field_name): + # TODO: Verify functionality against + # django.db.models.fields.related:1048 + # Let's not care too much on what flags could potentially be set on + # that intermediary class (e.g. managed, etc) + # Let's play the game, as if the programmer had specified a class + # within his models... Here's how. + + # FIXME: VersionedManyToManyModels do not get registered in the + # apps models. 
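Taken together with the add_at()/remove_at() helpers in versions/descriptors.py above, the VersionedManyToManyField machinery here is what makes time-aware m2m writes possible. A minimal usage sketch, reusing the Pupil/Teacher test models from versions_tests.models (the helper functions and timestamps are illustrative only):

from versions.util import get_utc_now


def assign_language_teacher(pupil, teacher, timestamp=None):
    # add_at() records the relationship as of an explicit, timezone-aware
    # timestamp instead of "now" (which plain add() would use).
    pupil.language_teachers.add_at(timestamp or get_utc_now(), teacher)


def unassign_language_teacher(pupil, teacher, timestamp=None):
    # remove_at() terminates the relationship -- not the related objects --
    # at the given point in time.
    pupil.language_teachers.remove_at(timestamp or get_utc_now(), teacher)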
# FIXME: This is usually done at django/db/models/base.py:284, - # invoked by create_many_to_many_intermediary_model at django.db.models.fields.related:1048 + # invoked by create_many_to_many_intermediary_model at + # django.db.models.fields.related:1048 def set_managed(model, related, through): - through._meta.managed = model._meta.managed or related._meta.managed + through._meta.managed = model._meta.managed or \ + related._meta.managed to_model = resolve_relation(cls, field.remote_field.model) @@ -168,10 +203,13 @@ def set_managed(model, related, through): 'auto_created': cls, 'app_label': cls._meta.app_label, 'db_tablespace': cls._meta.db_tablespace, - # 'unique_together' is not applicable as is, due to multiple versions to be allowed to exist. + # 'unique_together' is not applicable as is, due to multiple + # versions to be allowed to exist. # 'unique_together': (from_, to), - 'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to}, - 'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to}, + 'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, + 'to': to}, + 'verbose_name_plural': '%(from)s-%(to)s relationships' % { + 'from': from_, 'to': to}, 'apps': field.model._meta.apps, }) return type(str(name), (Versionable,), { @@ -234,19 +272,23 @@ def as_sql(self, qn=None, connection=None): # Fail fast for inacceptable cases if self._as_of_time_set and not self._joined_alias: - raise ValueError("joined_alias is not set, but as_of is; this is a conflict!") + raise ValueError( + "joined_alias is not set, but as_of is; this is a conflict!") # Set the SQL string in dependency of whether as_of_time was set or not if self._as_of_time_set: if self.as_of_time: sql = self.historic_sql params = [self.as_of_time] * 2 - # 2 is the number of occurences of the timestamp in an as_of-filter expression + # 2 is the number of occurences of the timestamp in an + # as_of-filter expression else: - # If as_of_time was set to None, we're dealing with a query for "current" values + # If as_of_time was set to None, we're dealing with a query + # for "current" values sql = self.current_sql else: - # No as_of_time has been set; Perhaps, as_of was not part of the query -> That's OK + # No as_of_time has been set; Perhaps, as_of was not part of the + # query -> That's OK pass # By here, the sql string is defined if an as_of_time was provided @@ -266,11 +308,12 @@ def as_sql(self, qn=None, connection=None): class VersionedWhereNode(WhereNode): def as_sql(self, qn, connection): """ - This method identifies joined table aliases in order for VersionedExtraWhere.as_sql() - to be able to add time restrictions for those tables based on the VersionedQuery's - querytime value. + This method identifies joined table aliases in order for + VersionedExtraWhere.as_sql() to be able to add time restrictions for + those tables based on the VersionedQuery's querytime value. 
- :param qn: In Django 1.7 & 1.8 this is a compiler; in 1.6, it's an instance-method + :param qn: In Django 1.7 & 1.8 this is a compiler; in 1.6, it's an + instance-method :param connection: A DB connection :return: A tuple consisting of (sql_string, result_params) """ diff --git a/versions/models.py b/versions/models.py index 8f0ee45..a3ad64e 100644 --- a/versions/models.py +++ b/versions/models.py @@ -30,7 +30,8 @@ from django.utils.timezone import utc from versions.exceptions import DeletionOfNonCurrentVersionError -from versions.settings import get_versioned_delete_collector_class, settings as versions_settings +from versions.settings import get_versioned_delete_collector_class, \ + settings as versions_settings from versions.util import get_utc_now @@ -60,7 +61,8 @@ class VersionManager(models.Manager): def get_queryset(self): """ - Returns a VersionedQuerySet capable of handling version time restrictions. + Returns a VersionedQuerySet capable of handling version time + restrictions. :return: VersionedQuerySet """ @@ -72,7 +74,8 @@ def get_queryset(self): def as_of(self, time=None): """ Filters Versionables at a given time - :param time: The timestamp (including timezone info) at which Versionables shall be retrieved + :param time: The timestamp (including timezone info) at which + Versionables shall be retrieved :return: A QuerySet containing the base for a timestamped query. """ return self.get_queryset().as_of(time) @@ -84,19 +87,21 @@ def next_version(self, object, relations_as_of='end'): In case there is no next object existing, meaning the given object is the current version, the function returns this version. - Note that if object's version_end_date is None, this does not check the database to - see if there is a newer version (perhaps created by some other code), it simply - returns the passed object. + Note that if object's version_end_date is None, this does not check + the database to see if there is a newer version (perhaps created by + some other code), it simply returns the passed object. - ``relations_as_of`` is used to fix the point in time for the version; this affects which related - objects are returned when querying for object relations. See ``VersionManager.version_as_of`` - for details on valid ``relations_as_of`` values. + ``relations_as_of`` is used to fix the point in time for the version; + this affects which related objects are returned when querying for + object relations. See ``VersionManager.version_as_of`` for details + on valid ``relations_as_of`` values. :param Versionable object: object whose next version will be returned. - :param mixed relations_as_of: determines point in time used to access relations. 'start'|'end'|datetime|None + :param mixed relations_as_of: determines point in time used to access + relations. 'start'|'end'|datetime|None :return: Versionable """ - if object.version_end_date == None: + if object.version_end_date is None: next = object else: next = self.filter( @@ -106,7 +111,8 @@ def next_version(self, object, relations_as_of='end'): if not next: raise ObjectDoesNotExist( - "next_version couldn't find a next version of object " + str(object.identity)) + "next_version couldn't find a next version of object " + + str(object.identity)) return self.adjust_version_as_of(next, relations_as_of) @@ -115,14 +121,18 @@ def previous_version(self, object, relations_as_of='end'): Return the previous version of the given object. 
In case there is no previous object existing, meaning the given object - is the first version of the object, then the function returns this version. - - ``relations_as_of`` is used to fix the point in time for the version; this affects which related - objects are returned when querying for object relations. See ``VersionManager.version_as_of`` - for details on valid ``relations_as_of`` values. - - :param Versionable object: object whose previous version will be returned. - :param mixed relations_as_of: determines point in time used to access relations. 'start'|'end'|datetime|None + is the first version of the object, then the function returns this + version. + + ``relations_as_of`` is used to fix the point in time for the version; + this affects which related objects are returned when querying for + object relations. See ``VersionManager.version_as_of`` for details on + valid ``relations_as_of`` values. + + :param Versionable object: object whose previous version will be + returned. + :param mixed relations_as_of: determines point in time used to access + relations. 'start'|'end'|datetime|None :return: Versionable """ if object.version_birth_date == object.version_start_date: @@ -135,7 +145,8 @@ def previous_version(self, object, relations_as_of='end'): if not previous: raise ObjectDoesNotExist( - "previous_version couldn't find a previous version of object " + str(object.identity)) + "previous_version couldn't find a previous version of " + "object " + str(object.identity)) return self.adjust_version_as_of(previous, relations_as_of) @@ -144,20 +155,26 @@ def current_version(self, object, relations_as_of=None, check_db=False): Return the current version of the given object. The current version is the one having its version_end_date set to NULL. - If there is not such a version then it means the object has been 'deleted' - and so there is no current version available. In this case the function returns None. - - Note that if check_db is False and object's version_end_date is None, this does not - check the database to see if there is a newer version (perhaps created by some other code), - it simply returns the passed object. - - ``relations_as_of`` is used to fix the point in time for the version; this affects which related - objects are returned when querying for object relations. See ``VersionManager.version_as_of`` - for details on valid ``relations_as_of`` values. - - :param Versionable object: object whose current version will be returned. - :param mixed relations_as_of: determines point in time used to access relations. 'start'|'end'|datetime|None - :param bool check_db: Whether or not to look in the database for a more recent version + If there is not such a version then it means the object has been + 'deleted' and so there is no current version available. In this case + the function returns None. + + Note that if check_db is False and object's version_end_date is None, + this does not check the database to see if there is a newer version + (perhaps created by some other code), it simply returns the passed + object. + + ``relations_as_of`` is used to fix the point in time for the version; + this affects which related objects are returned when querying for + object relations. See ``VersionManager.version_as_of`` for details on + valid ``relations_as_of`` values. + + :param Versionable object: object whose current version will be + returned. + :param mixed relations_as_of: determines point in time used to access + relations. 
'start'|'end'|datetime|None + :param bool check_db: Whether or not to look in the database for a + more recent version :return: Versionable """ if object.version_end_date is None and not check_db: @@ -170,18 +187,26 @@ def current_version(self, object, relations_as_of=None, check_db=False): @staticmethod def adjust_version_as_of(version, relations_as_of): """ - Adjusts the passed version's as_of time to an appropriate value, and returns it. - - ``relations_as_of`` is used to fix the point in time for the version; this affects which related - objects are returned when querying for object relations. - Valid ``relations_as_of`` values and how this affects the returned version's as_of attribute: + Adjusts the passed version's as_of time to an appropriate value, and + returns it. + + ``relations_as_of`` is used to fix the point in time for the version; + this affects which related objects are returned when querying for + object relations. + Valid ``relations_as_of`` values and how this affects the returned + version's as_of attribute: - 'start': version start date - - 'end': version end date - 1 microsecond (no effect if version is current version) - - datetime object: given datetime (raises ValueError if given datetime not valid for version) - - None: unset (related object queries will not be restricted to a point in time) - - :param Versionable object: object whose as_of will be adjusted as requested. - :param mixed relations_as_of: valid values are the strings 'start' or 'end', or a datetime object. + - 'end': version end date - 1 microsecond (no effect if version is + current version) + - datetime object: given datetime (raises ValueError if given datetime + not valid for version) + - None: unset (related object queries will not be restricted to a + point in time) + + :param Versionable object: object whose as_of will be adjusted as + requested. + :param mixed relations_as_of: valid values are the strings 'start' or + 'end', or a datetime object. :return: Versionable """ if not version: @@ -189,24 +214,29 @@ def adjust_version_as_of(version, relations_as_of): if relations_as_of == 'end': if version.is_current: - # Ensure that version._querytime is active, in case it wasn't before. + # Ensure that version._querytime is active, in case it wasn't + # before. 
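A short sketch of how the navigation helpers documented above are typically used; Team stands in for any Versionable, and the relations_as_of/check_db values are only illustrative:

from versions_tests.models import Team


def walk_history(team):
    # next_version() of the current version returns the object itself,
    # and previous_version() of the first version does the same.
    newer = Team.objects.next_version(team)
    # relations_as_of='start' pins related-object queries on the returned
    # version to its version_start_date.
    older = Team.objects.previous_version(team, relations_as_of='start')
    # current_version() returns None once the identity has been 'deleted'.
    current = Team.objects.current_version(team, check_db=True)
    return newer, older, current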
version.as_of = None else: - version.as_of = version.version_end_date - datetime.timedelta(microseconds=1) + version.as_of = version.version_end_date - datetime.timedelta( + microseconds=1) elif relations_as_of == 'start': version.as_of = version.version_start_date elif isinstance(relations_as_of, datetime.datetime): as_of = relations_as_of.astimezone(utc) if not as_of >= version.version_start_date: raise ValueError( - "Provided as_of '{}' is earlier than version's start time '{}'".format( + "Provided as_of '{}' is earlier than version's start " + "time '{}'".format( as_of.isoformat(), version.version_start_date.isoformat() ) ) - if version.version_end_date is not None and as_of >= version.version_end_date: + if version.version_end_date is not None \ + and as_of >= version.version_end_date: raise ValueError( - "Provided as_of '{}' is later than version's start time '{}'".format( + "Provided as_of '{}' is later than version's start " + "time '{}'".format( as_of.isoformat(), version.version_end_date.isoformat() ) @@ -215,7 +245,9 @@ def adjust_version_as_of(version, relations_as_of): elif relations_as_of is None: version._querytime = QueryTime(time=None, active=False) else: - raise TypeError("as_of parameter must be 'start', 'end', None, or datetime object") + raise TypeError( + "as_of parameter must be 'start', 'end', None, or datetime " + "object") return version @@ -231,14 +263,19 @@ def create(self, **kwargs): """ return self._create_at(None, **kwargs) - def _create_at(self, timestamp=None, id=None, forced_identity=None, **kwargs): + def _create_at(self, timestamp=None, id=None, forced_identity=None, + **kwargs): """ WARNING: Only for internal use and testing. - Create a Versionable having a version_start_date and version_birth_date set to some pre-defined timestamp + Create a Versionable having a version_start_date and + version_birth_date set to some pre-defined timestamp + :param timestamp: point in time at which the instance has to be created - :param id: version 4 UUID unicode object. Usually this is not specified, it will be automatically created. - :param forced_identity: version 4 UUID unicode object. For internal use only. + :param id: version 4 UUID unicode object. Usually this is not + specified, it will be automatically created. + :param forced_identity: version 4 UUID unicode object. For internal + use only. :param kwargs: arguments needed for initializing the instance :return: an instance of the class """ @@ -260,9 +297,9 @@ def _create_at(self, timestamp=None, id=None, forced_identity=None, **kwargs): class VersionedWhereNode(WhereNode): def as_sql(self, qn, connection): """ - This method identifies joined table aliases in order for VersionedExtraWhere.as_sql() - to be able to add time restrictions for those tables based on the VersionedQuery's - querytime value. + This method identifies joined table aliases in order for + VersionedExtraWhere.as_sql() to be able to add time restrictions for + those tables based on the VersionedQuery's querytime value. :param qn: In Django 1.7 & 1.8 this is a compiler :param connection: A DB connection @@ -305,9 +342,10 @@ def _set_child_joined_alias(child, alias_map): class VersionedQuery(Query): """ - VersionedQuery has awareness of the query time restrictions. When the query is compiled, - this query time information is passed along to the foreign keys involved in the query, so - that they can provide that information when building the sql. + VersionedQuery has awareness of the query time restrictions. 
When the + query is compiled, this query time information is passed along to the + foreign keys involved in the query, so that they can provide that + information when building the sql. """ def __init__(self, *args, **kwargs): @@ -321,75 +359,92 @@ def clone(self, *args, **kwargs): try: _clone.querytime = self.querytime except AttributeError: - # If the caller is using clone to create a different type of Query, that's OK. - # An example of this is when creating or updating an object, this method is called - # with a first parameter of sql.UpdateQuery. + # If the caller is using clone to create a different type of Query, + # that's OK. + # An example of this is when creating or updating an object, this + # method is called with a first parameter of sql.UpdateQuery. pass return _clone def get_compiler(self, *args, **kwargs): """ - Add the query time restriction limit at the last moment. Applying it earlier - (e.g. by adding a filter to the queryset) does not allow the caching of related - object to work (they are attached to a queryset; filter() returns a new queryset). + Add the query time restriction limit at the last moment. Applying it + earlier (e.g. by adding a filter to the queryset) does not allow the + caching of related object to work (they are attached to a queryset; + filter() returns a new queryset). """ - if self.querytime.active and (not hasattr(self, '_querytime_filter_added') or not self._querytime_filter_added): + if self.querytime.active and \ + (not hasattr(self, '_querytime_filter_added') or + not self._querytime_filter_added): time = self.querytime.time if time is None: self.add_q(Q(version_end_date__isnull=True)) else: self.add_q( - (Q(version_end_date__gt=time) | Q(version_end_date__isnull=True)) - & Q(version_start_date__lte=time) + (Q(version_end_date__gt=time) | + Q(version_end_date__isnull=True)) & + Q(version_start_date__lte=time) ) - # Ensure applying these filters happens only a single time (even if it doesn't falsify the query, it's - # just not very comfortable to read) + # Ensure applying these filters happens only a single time (even + # if it doesn't falsify the query, it's just not very comfortable + # to read) self._querytime_filter_added = True return super(VersionedQuery, self).get_compiler(*args, **kwargs) def build_filter(self, filter_expr, **kwargs): """ - When a query is filtered with an expression like .filter(team=some_team_object), - where team is a VersionedForeignKey field, and some_team_object is a Versionable object, - adapt the filter value to be (team__identity=some_team_object.identity). + When a query is filtered with an expression like + .filter(team=some_team_object), where team is a VersionedForeignKey + field, and some_team_object is a Versionable object, adapt the filter + value to be (team__identity=some_team_object.identity). - When the query is built, this will enforce that the tables are joined and that - the identity column and the as_of restriction is used for matching. + When the query is built, this will enforce that the tables are joined + and that the identity column and the as_of restriction is used for + matching. For example, the generated SQL will be like: SELECT ... FROM foo INNER JOIN team ON ( foo.team_id == team.identity AND foo.version_start_date <= [as_of] - AND (foo.version_end_date > [as_of] OR foo.version_end_date IS NULL)) ... + AND (foo.version_end_date > [as_of] + OR foo.version_end_date IS NULL)) ... 
This is not necessary, and won't be applied, if any of these are true: - no as_of is in effect - - the current objects are being queried (e.g. foo.objects.current.filter(...)) - - a terminal object is being used as the lookup value (e.g. .filter(team=the_deleted_team_version) - - the lookup value is not a Versionable (e.g. .filter(foo='bar') or .filter(team=non_versionable_team) - - Note that this has the effect that Foo.objects.as_of(t1).filter(team=team_object_at_t3) will - return the Foo objects at t1, and that accessing their team field (e.g. foo.team) will return - the team object that was associated with them at t1, which may be a different object than - team_object_at_t3. - - The goal is to make expressions like Foo.objects.as_of(tx).filter(team=some_team_object) work as - closely as possible to standard, non-versioned Django querysets like Foo.objects.filter(team=some_team_object). + - the current objects are being queried + (e.g. foo.objects.current.filter(...)) + - a terminal object is being used as the lookup value + (e.g. .filter(team=the_deleted_team_version) + - the lookup value is not a Versionable + (e.g. .filter(foo='bar') or .filter(team=non_versionable_team) + + Note that this has the effect that + Foo.objects.as_of(t1).filter(team=team_object_at_t3) will return the + Foo objects at t1, and that accessing their team field (e.g. foo.team) + will return the team object that was associated with them at t1, + which may be a different object than team_object_at_t3. + + The goal is to make expressions like + Foo.objects.as_of(tx).filter(team=some_team_object) work as closely + as possible to standard, non-versioned Django querysets like + Foo.objects.filter(team=some_team_object). :param filter_expr: :param kwargs: :return: tuple """ lookup, value = filter_expr - if self.querytime.active and isinstance(value, Versionable) and not value.is_latest: - new_lookup = lookup + LOOKUP_SEP + Versionable.OBJECT_IDENTIFIER_FIELD + if self.querytime.active \ + and isinstance(value, Versionable) and not value.is_latest: + new_lookup = \ + lookup + LOOKUP_SEP + Versionable.OBJECT_IDENTIFIER_FIELD filter_expr = (new_lookup, value.identity) return super(VersionedQuery, self).build_filter(filter_expr, **kwargs) def add_immediate_loading(self, field_names): - # TODO: Decide, whether we always want versionable fields to be loaded, even if ``only`` is used and they would - # be deferred + # TODO: Decide, whether we always want versionable fields to be loaded, + # even if ``only`` is used and they would be deferred # field_names += tuple(Versionable.VERSIONABLE_FIELDS) super(VersionedQuery, self).add_immediate_loading(field_names) @@ -408,7 +463,8 @@ def __init__(self, model=None, query=None, *args, **kwargs): """ if not query: query = VersionedQuery(model) - super(VersionedQuerySet, self).__init__(model=model, query=query, *args, **kwargs) + super(VersionedQuerySet, self).__init__(model=model, query=query, + *args, **kwargs) self.querytime = QueryTime(time=None, active=False) @property @@ -427,7 +483,9 @@ def querytime(self, value): def __getitem__(self, k): """ - Overrides the QuerySet.__getitem__ magic method for retrieving a list-item out of a query set. + Overrides the QuerySet.__getitem__ magic method for retrieving a + list-item out of a query set. 
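The behaviour that build_filter() describes can be summarised with a small sketch; it assumes a Player model carrying a VersionedForeignKey named team to Team, as in the versions_tests models:

from versions_tests.models import Player


def players_for_team(team, as_of_time=None):
    if as_of_time is not None:
        # filter(team=...) is rewritten to team__identity=..., and the JOIN
        # carries the version_start_date/version_end_date restriction for
        # as_of_time, as in the SQL shown above.
        return Player.objects.as_of(as_of_time).filter(team=team)
    # For current objects no identity rewrite is applied; only rows with
    # version_end_date IS NULL are matched.
    return Player.objects.current.filter(team=team)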
+ :param k: Retrieve the k-th element or a range of elements :return: Either one element or a list of elements """ @@ -441,12 +499,16 @@ def __getitem__(self, k): def _fetch_all(self): """ - Completely overrides the QuerySet._fetch_all method by adding the timestamp to all objects - :return: See django.db.models.query.QuerySet._fetch_all for return values + Completely overrides the QuerySet._fetch_all method by adding the + timestamp to all objects + + :return: See django.db.models.query.QuerySet._fetch_all for return + values """ if self._result_cache is None: self._result_cache = list(self.iterator()) - # TODO: Do we have to test for ValuesListIterable, ValuesIterable, and FlatValuesListIterable here? + # TODO: Do we have to test for ValuesListIterable, ValuesIterable, + # and FlatValuesListIterable here? if self._iterable_class == ModelIterable: for x in self._result_cache: self._set_item_querytime(x) @@ -455,9 +517,12 @@ def _fetch_all(self): def _clone(self, *args, **kwargs): """ - Overrides the QuerySet._clone method by adding the cloning of the VersionedQuerySet's query_time parameter + Overrides the QuerySet._clone method by adding the cloning of the + VersionedQuerySet's query_time parameter + :param kwargs: Same as the original QuerySet._clone params - :return: Just as QuerySet._clone, this method returns a clone of the original object + :return: Just as QuerySet._clone, this method returns a clone of the + original object """ clone = super(VersionedQuerySet, self)._clone(**kwargs) clone.querytime = self.querytime @@ -466,6 +531,7 @@ def _clone(self, *args, **kwargs): def _set_item_querytime(self, item, type_check=True): """ Sets the time for which the query was made on the resulting item + :param item: an item of type Versionable :param type_check: Check the item to be a Versionable :return: Returns the item itself with the time set @@ -476,13 +542,17 @@ def _set_item_querytime(self, item, type_check=True): item.querytime = self.querytime else: if type_check: - raise TypeError("This item is not a Versionable, it's a " + str(type(item))) + raise TypeError( + "This item is not a Versionable, it's a " + str( + type(item))) return item def as_of(self, qtime=None): """ Sets the time for which we want to retrieve an object. 
- :param qtime: The UTC date and time; if None then use the current state (where version_end_date = NULL) + + :param qtime: The UTC date and time; if None then use the current + state (where version_end_date = NULL) :return: A VersionedQuerySet """ clone = self._clone() @@ -527,40 +597,49 @@ class Versionable(models.Model): """ VERSION_IDENTIFIER_FIELD = 'id' OBJECT_IDENTIFIER_FIELD = 'identity' - VERSIONABLE_FIELDS = [VERSION_IDENTIFIER_FIELD, OBJECT_IDENTIFIER_FIELD, 'version_start_date', + VERSIONABLE_FIELDS = [VERSION_IDENTIFIER_FIELD, OBJECT_IDENTIFIER_FIELD, + 'version_start_date', 'version_end_date', 'version_birth_date'] if versions_settings.VERSIONS_USE_UUIDFIELD: id = models.UUIDField(primary_key=True) - """id stands for ID and is the primary key; sometimes also referenced as the surrogate key""" + """id stands for ID and is the primary key; sometimes also referenced + as the surrogate key""" else: id = models.CharField(max_length=36, primary_key=True) if versions_settings.VERSIONS_USE_UUIDFIELD: identity = models.UUIDField() - """identity is used as the identifier of an object, ignoring its versions; sometimes also referenced as the natural key""" + """identity is used as the identifier of an object, ignoring its + versions; sometimes also referenced as the natural key""" else: identity = models.CharField(max_length=36) - """identity is used as the identifier of an object, ignoring its versions; sometimes also referenced as the natural key""" + """identity is used as the identifier of an object, ignoring its + versions; sometimes also referenced as the natural key""" version_start_date = models.DateTimeField() - """version_start_date points the moment in time, when a version was created (ie. an versionable was cloned). - This means, it points the start of a clone's validity period""" + """version_start_date points the moment in time, when a version was + created (ie. an versionable was cloned). This means, it points the start + of a clone's validity period""" - version_end_date = models.DateTimeField(null=True, default=None, blank=True) - """version_end_date, if set, points the moment in time, when the entry was duplicated (ie. the entry was cloned). It - points therefore the end of a clone's validity period""" + version_end_date = models.DateTimeField(null=True, default=None, + blank=True) + """version_end_date, if set, points the moment in time, when the entry was + duplicated (ie. the entry was cloned). It points therefore the end of a + clone's validity period""" version_birth_date = models.DateTimeField() - """version_birth_date contains the timestamp pointing to when the versionable has been created (independent of any - version); This timestamp is bound to an identity""" + """version_birth_date contains the timestamp pointing to when the + versionable has been created (independent of any version); This timestamp + is bound to an identity""" objects = VersionManager() """Make the versionable compliant with Django""" as_of = None - """Hold the timestamp at which the object's data was looked up. Its value must always be in between the - version_start_date and the version_end_date""" + """Hold the timestamp at which the object's data was looked up. Its value + must always be in between the version_start_date and the + version_end_date""" class Meta: abstract = True @@ -573,9 +652,10 @@ def __init__(self, *args, **kwargs): self._querytime = QueryTime(time=None, active=False) # Ensure that the versionable field values are set. 
- # If there are any deferred fields, then this instance is being initialized - # from data in the database, and thus these values will already be set (unless - # the fields are deferred, in which case they should not be set here). + # If there are any deferred fields, then this instance is being + # initialized from data in the database, and thus these values will + # already be set (unless the fields are deferred, in which case they + # should not be set here). if not self.get_deferred_fields(): if not getattr(self, 'version_start_date', None): setattr(self, 'version_start_date', get_utc_now()) @@ -584,13 +664,14 @@ def __init__(self, *args, **kwargs): if not getattr(self, self.VERSION_IDENTIFIER_FIELD, None): setattr(self, self.VERSION_IDENTIFIER_FIELD, self.uuid()) if not getattr(self, self.OBJECT_IDENTIFIER_FIELD, None): - setattr(self, self.OBJECT_IDENTIFIER_FIELD, getattr(self, self.VERSION_IDENTIFIER_FIELD)) + setattr(self, self.OBJECT_IDENTIFIER_FIELD, + getattr(self, self.VERSION_IDENTIFIER_FIELD)) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self._get_pk_val() is not None, \ - "{} object can't be deleted because its {} attribute is set to None.".format( - self._meta.object_name, self._meta.pk.attname) + "{} object can't be deleted because its {} attribute is set to " \ + "None.".format(self._meta.object_name, self._meta.pk.attname) collector_class = get_versioned_delete_collector_class() collector = collector_class(using=using) @@ -605,14 +686,15 @@ def _delete_at(self, timestamp, using=None): It is used only in the case when you want to make sure a group of related objects are deleted at the exact same time. - It is certainly not meant to be used for deleting an object and giving it - a random deletion date of your liking. + It is certainly not meant to be used for deleting an object and giving + it a random deletion date of your liking. """ if self.version_end_date is None: self.version_end_date = timestamp self.save(force_update=True, using=using) else: - raise DeletionOfNonCurrentVersionError('Cannot delete anything else but the current version') + raise DeletionOfNonCurrentVersionError( + 'Cannot delete anything else but the current version') @property def is_current(self): @@ -623,7 +705,8 @@ def is_latest(self): """ Checks if this is the latest version. - Note that this will not check the database for a possible newer version. + Note that this will not check the database for a possible newer + version. It simply inspects the object's in-memory state. :return: boolean @@ -635,7 +718,8 @@ def is_terminated(self): """ Checks if this version has been terminated. - This will be true if a newer version has been created, or if the version has been "deleted". + This will be true if a newer version has been created, or if the + version has been "deleted". :return: boolean """ @@ -654,11 +738,13 @@ def uuid(uuid_value=None): """ Returns a uuid value that is valid to use for id and identity fields. - :return: unicode uuid object if using UUIDFields, uuid unicode string otherwise. + :return: unicode uuid object if using UUIDFields, uuid unicode string + otherwise. 
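What the termination helpers above mean for callers, as a sketch (Team again stands in for any Versionable):

from versions_tests.models import Team


def terminate(team):
    # delete() does not remove rows; it sets version_end_date on the
    # current version, leaving the identity without a current version.
    team.delete()
    # Historic versions remain queryable via as_of(); only the "current"
    # queryset comes back empty.
    return Team.objects.current.filter(identity=team.identity).exists()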
""" if uuid_value: if not validate_uuid(uuid_value): - raise ValueError("uuid_value must be a valid UUID version 4 object") + raise ValueError( + "uuid_value must be a valid UUID version 4 object") else: uuid_value = uuid.uuid4() @@ -680,33 +766,43 @@ def _clone_at(self, timestamp): def clone(self, forced_version_date=None, in_bulk=False): """ Clones a Versionable and returns a fresh copy of the original object. - Original source: ClonableMixin snippet (http://djangosnippets.org/snippets/1271), with the pk/id change + Original source: ClonableMixin snippet + (http://djangosnippets.org/snippets/1271), with the pk/id change suggested in the comments - :param forced_version_date: a timestamp including tzinfo; this value is usually set only internally! - :param in_bulk: whether not to write this objects to the database already, if not necessary; this value is - usually set only internally for performance optimization - :return: returns a fresh clone of the original object (with adjusted relations) + :param forced_version_date: a timestamp including tzinfo; this value + is usually set only internally! + :param in_bulk: whether not to write this objects to the database + already, if not necessary; this value is usually set only + internally for performance optimization + :return: returns a fresh clone of the original object + (with adjusted relations) """ if not self.pk: raise ValueError('Instance must be saved before it can be cloned') if self.version_end_date: - raise ValueError('This is a historical item and can not be cloned.') + raise ValueError( + 'This is a historical item and can not be cloned.') if forced_version_date: - if not self.version_start_date <= forced_version_date <= get_utc_now(): - raise ValueError('The clone date must be between the version start date and now.') + if not self.version_start_date <= forced_version_date <= \ + get_utc_now(): + raise ValueError( + 'The clone date must be between the version start date ' + 'and now.') else: forced_version_date = get_utc_now() if self.get_deferred_fields(): - # It would be necessary to fetch the record from the database again for this to succeed. - # Alternatively, perhaps it would be possible to create a copy of the object after - # fetching the missing fields. - # Doing so may be unexpected by the calling code, so raise an exception: the calling - # code should be adapted if necessary. - raise ValueError('Can not clone a model instance that has deferred fields') + # It would be necessary to fetch the record from the database + # again for this to succeed. + # Alternatively, perhaps it would be possible to create a copy of + # the object after fetching the missing fields. + # Doing so may be unexpected by the calling code, so raise an + # exception: the calling code should be adapted if necessary. 
+ raise ValueError( + 'Can not clone a model instance that has deferred fields') earlier_version = self @@ -714,15 +810,15 @@ def clone(self, forced_version_date=None, in_bulk=False): later_version.version_end_date = None later_version.version_start_date = forced_version_date - # set earlier_version's ID to a new UUID so the clone (later_version) can - # get the old one -- this allows 'head' to always have the original + # set earlier_version's ID to a new UUID so the clone (later_version) + # can get the old one -- this allows 'head' to always have the original # id allowing us to get at all historic foreign key relationships earlier_version.id = self.uuid() earlier_version.version_end_date = forced_version_date if not in_bulk: - # This condition might save us a lot of database queries if we are being called - # from a loop like in .clone_relations + # This condition might save us a lot of database queries if we are + # being called from a loop like in .clone_relations earlier_version.save() later_version.save() else: @@ -730,25 +826,30 @@ def clone(self, forced_version_date=None, in_bulk=False): # re-create ManyToMany relations for field_name in self.get_all_m2m_field_names(): - earlier_version.clone_relations(later_version, field_name, forced_version_date) + earlier_version.clone_relations(later_version, field_name, + forced_version_date) return later_version def at(self, timestamp): """ - Force the create date of an object to be at a certain time; This method can be invoked only on a - freshly created Versionable object. It must not have been cloned yet. Raises a SuspiciousOperation + Force the create date of an object to be at a certain time; This + method can be invoked only on a freshly created Versionable object. + It must not have been cloned yet. Raises a SuspiciousOperation exception, otherwise. :param timestamp: a datetime.datetime instance """ # Ensure, it's not a historic item if not self.is_current: raise SuspiciousOperation( - "Cannot relocate this Versionable instance in time, since it is a historical item") - # Ensure it's not a versioned item (that would lead to some ugly situations... + "Cannot relocate this Versionable instance in time, since it " + "is a historical item") + # Ensure it's not a versioned item (that would lead to some ugly + # situations... 
if not self.version_birth_date == self.version_start_date: raise SuspiciousOperation( - "Cannot relocate this Versionable instance in time, since it is a versioned instance") + "Cannot relocate this Versionable instance in time, since it " + "is a versioned instance") # Ensure the argument is really a timestamp if not isinstance(timestamp, datetime.datetime): raise ValueError("This is not a datetime.datetime timestamp") @@ -756,8 +857,11 @@ def at(self, timestamp): return self def clone_relations(self, clone, manager_field_name, forced_version_date): - # Source: the original object, where relations are currently pointing to - source = getattr(self, manager_field_name) # returns a VersionedRelatedManager instance + # Source: the original object, where relations are currently + # pointing to + source = getattr(self, + manager_field_name) + # returns a VersionedRelatedManager instance # Destination: the clone, where the cloned relations should point to destination = getattr(clone, manager_field_name) for item in source.all(): @@ -765,61 +869,82 @@ def clone_relations(self, clone, manager_field_name, forced_version_date): # retrieve all current m2m relations pointing the newly created clone # filter for source_id - m2m_rels = list(source.through.objects.filter(**{source.source_field.attname: clone.id})) + m2m_rels = list(source.through.objects.filter( + **{source.source_field.attname: clone.id})) later_current = [] later_non_current = [] for rel in m2m_rels: - # Only clone the relationship, if it is the current one; Simply adjust the older ones to point the old entry - # Otherwise, the number of pointers pointing an entry will grow exponentially + # Only clone the relationship, if it is the current one; Simply + # adjust the older ones to point the old entry. + # Otherwise, the number of pointers pointing an entry will grow + # exponentially if rel.is_current: - later_current.append(rel.clone(forced_version_date=self.version_end_date, in_bulk=True)) - # On rel, which is no more 'current', set the source ID to self.id + later_current.append( + rel.clone(forced_version_date=self.version_end_date, + in_bulk=True)) + # On rel, which is no more 'current', set the source ID to + # self.id setattr(rel, source.source_field_name, self) else: later_non_current.append(rel) - # Perform the bulk changes rel.clone() did not perform because of the in_bulk parameter + # Perform the bulk changes rel.clone() did not perform because of the + # in_bulk parameter. 
# This saves a huge bunch of SQL queries: # - update current version entries - source.through.objects.filter(id__in=[l.id for l in later_current]).update( + source.through.objects.filter( + id__in=[l.id for l in later_current]).update( **{'version_start_date': forced_version_date}) - # - update entries that have been pointing the current object, but have never been 'current' - source.through.objects.filter(id__in=[l.id for l in later_non_current]).update( + # - update entries that have been pointing the current object, but + # have never been 'current' + source.through.objects.filter( + id__in=[l.id for l in later_non_current]).update( **{source.source_field_name: self}) - # - create entries that were 'current', but which have been relieved in this method run - source.through.objects.bulk_create([r for r in m2m_rels if hasattr(r, '_not_created') and r._not_created]) + # - create entries that were 'current', but which have been relieved + # in this method run + source.through.objects.bulk_create( + [r for r in m2m_rels + if hasattr(r, '_not_created') and r._not_created]) def restore(self, **kwargs): """ Restores this version as a new version, and returns this new version. - If a current version already exists, it will be terminated before restoring this version. + If a current version already exists, it will be terminated before + restoring this version. - Relations (foreign key, reverse foreign key, many-to-many) are not restored with the old - version. If provided in kwargs, (Versioned)ForeignKey fields will be set to the provided - values. If passing an id for a (Versioned)ForeignKey, use the field.attname. For example: + Relations (foreign key, reverse foreign key, many-to-many) are not + restored with the old version. If provided in kwargs, + (Versioned)ForeignKey fields will be set to the provided values. + If passing an id for a (Versioned)ForeignKey, use the field.attname. + For example: restore(team_id=myteam.pk) If passing an object, simply use the field name, e.g.: restore(team=myteam) - If a (Versioned)ForeignKey is not nullable and no value is provided for it in kwargs, a - ForeignKeyRequiresValueError will be raised. + If a (Versioned)ForeignKey is not nullable and no value is provided + for it in kwargs, a ForeignKeyRequiresValueError will be raised. :param kwargs: arguments used to initialize the class instance :return: Versionable """ if not self.pk: - raise ValueError('Instance must be saved and terminated before it can be restored.') + raise ValueError( + 'Instance must be saved and terminated before it can be ' + 'restored.') if self.is_current: - raise ValueError('This is the current version, no need to restore it.') + raise ValueError( + 'This is the current version, no need to restore it.') if self.get_deferred_fields(): - # It would be necessary to fetch the record from the database again for this to succeed. - # Alternatively, perhaps it would be possible to create a copy of the object after - # fetching the missing fields. - # Doing so may be unexpected by the calling code, so raise an exception: the calling - # code should be adapted if necessary. - raise ValueError('Can not restore a model instance that has deferred fields') + # It would be necessary to fetch the record from the database + # again for this to succeed. + # Alternatively, perhaps it would be possible to create a copy + # of the object after fetching the missing fields. + # Doing so may be unexpected by the calling code, so raise an + # exception: the calling code should be adapted if necessary. 
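Mirroring the restore() docstring above (myteam and the team field are the docstring's own illustrative names, not part of the test models):

def revive(old_version, myteam):
    # Any existing current version is terminated first; relations are not
    # carried over, so non-nullable (Versioned)ForeignKeys must be passed
    # explicitly -- either as objects (team=...) or as ids (team_id=...).
    return old_version.restore(team=myteam)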
+ raise ValueError( + 'Can not restore a model instance that has deferred fields') cls = self.__class__ now = get_utc_now() @@ -827,18 +952,22 @@ def restore(self, **kwargs): restored.version_end_date = None restored.version_start_date = now - fields = [f for f in cls._meta.local_fields if f.name not in Versionable.VERSIONABLE_FIELDS] + fields = [f for f in cls._meta.local_fields if + f.name not in Versionable.VERSIONABLE_FIELDS] for field in fields: if field.attname in kwargs: setattr(restored, field.attname, kwargs[field.attname]) elif field.name in kwargs: setattr(restored, field.name, kwargs[field.name]) elif isinstance(field, ForeignKey): - # Set all non-provided ForeignKeys to None. If required, raise an error. + # Set all non-provided ForeignKeys to None. If required, + # raise an error. try: setattr(restored, field.name, None) # Check for non null foreign key removed since Django 1.10 - # https://docs.djangoproject.com/en/1.10/releases/1.10/#removed-null-assignment-check-for-non-null-foreign-key-fields + # https://docs.djangoproject.com/en/1.10/releases/1.10/ + # #removed-null-assignment-check-for-non-null-foreign- + # key-fields if not field.null: raise ValueError except ValueError: @@ -856,10 +985,14 @@ def restore(self, **kwargs): self.save() restored.save() - # Update ManyToMany relations to point to the old version's id instead of the restored version's id. + # Update ManyToMany relations to point to the old version's id + # instead of the restored version's id. for field_name in self.get_all_m2m_field_names(): - manager = getattr(restored, field_name) # returns a VersionedRelatedManager instance - manager.through.objects.filter(**{manager.source_field.attname: restored.id}).update( + manager = getattr(restored, + field_name) + # returns a VersionedRelatedManager instance + manager.through.objects.filter( + **{manager.source_field.attname: restored.id}).update( **{manager.source_field_name: self}) return restored @@ -868,7 +1001,8 @@ def get_all_m2m_field_names(self): opts = self._meta rel_field_names = [field.attname for field in opts.many_to_many] if hasattr(opts, 'many_to_many_related'): - rel_field_names += [rel.reverse for rel in opts.many_to_many_related] + rel_field_names += [rel.reverse for rel in + opts.many_to_many_related] return rel_field_names @@ -877,10 +1011,12 @@ def detach(self): Detaches the instance from its history. Similar to creating a new object with the same field values. The id and - identity fields are set to a new value. The returned object has not been saved, - call save() afterwards when you are ready to persist the object. + identity fields are set to a new value. The returned object has not + been saved, call save() afterwards when you are ready to persist the + object. - ManyToMany and reverse ForeignKey relations are lost for the detached object. + ManyToMany and reverse ForeignKey relations are lost for the detached + object. 
:return: Versionable """ @@ -903,5 +1039,6 @@ def matches_querytime(instance, querytime): if not querytime.time: return instance.version_end_date is None - return (instance.version_start_date <= querytime.time - and (instance.version_end_date is None or instance.version_end_date > querytime.time)) + return (instance.version_start_date <= querytime.time and + (instance.version_end_date is None or + instance.version_end_date > querytime.time)) diff --git a/versions/settings.py b/versions/settings.py index 272354b..d291d28 100644 --- a/versions/settings.py +++ b/versions/settings.py @@ -1,19 +1,19 @@ -from django.conf import settings as django_settings import importlib -from django import VERSION +from django import VERSION +from django.conf import settings as django_settings _cache = {} class VersionsSettings(object): """ - Gets a setting from django.conf.settings if set, otherwise from the defaults - defined in this class. + Gets a setting from django.conf.settings if set, otherwise from the + defaults defined in this class. - A magic accessor is used instead of just defining module-level variables because - Django doesn't like attributes of the django.conf.settings object to be accessed in - module scope. + A magic accessor is used instead of just defining module-level variables + because Django doesn't like attributes of the django.conf.settings object + to be accessed in module scope. """ defaults = { @@ -28,7 +28,9 @@ def __getattr__(self, name): try: return self.defaults[name] except KeyError: - raise AttributeError("{} object has no attribute {}".format(self.__class__, name)) + raise AttributeError( + "{} object has no attribute {}".format(self.__class__, + name)) settings = VersionsSettings() @@ -45,8 +47,11 @@ def import_from_string(val, setting_name): module = importlib.import_module(module_path) return getattr(module, class_name) except ImportError as e: - raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format( - (val, setting_name, e.__class__.__name__, e))) + raise ImportError("Could not import '{}' for CleanerVersion " + "setting '{}'. 
{}: {}.".format((val, + setting_name, + e.__class__.__name__, + e))) def get_versioned_delete_collector_class(): diff --git a/versions/util/helper.py b/versions/util/helper.py index 883d06b..39dc64a 100644 --- a/versions/util/helper.py +++ b/versions/util/helper.py @@ -1,13 +1,15 @@ from __future__ import absolute_import -from django.db import connection, connections + +from versions.models import Versionable + from django import VERSION +from django.db import connection, connections if VERSION >= (1, 7): from django.apps import apps else: from django.db.models import get_app, get_models -from ..models import Versionable def database_connection(dbname=None): if dbname: @@ -15,12 +17,16 @@ def database_connection(dbname=None): else: return connection + def get_app_models(app_name, include_auto_created=False): if VERSION >= (1, 7): - return apps.get_app_config(app_name).get_models(include_auto_created=include_auto_created) + return apps.get_app_config(app_name).get_models( + include_auto_created=include_auto_created) else: - return get_models(get_app(app_name), include_auto_created=include_auto_created) + return get_models(get_app(app_name), + include_auto_created=include_auto_created) def versionable_models(app_name, include_auto_created=False): - return [m for m in get_app_models(app_name, include_auto_created) if issubclass(m, Versionable)] \ No newline at end of file + return [m for m in get_app_models(app_name, include_auto_created) if + issubclass(m, Versionable)] diff --git a/versions/util/postgresql.py b/versions/util/postgresql.py index 66b9c77..189b602 100644 --- a/versions/util/postgresql.py +++ b/versions/util/postgresql.py @@ -1,5 +1,7 @@ from __future__ import absolute_import + from django.db import connection as default_connection + from versions.fields import VersionedForeignKey from .helper import database_connection, versionable_models @@ -12,20 +14,24 @@ def index_exists(cursor, index_name): :param index_name: string :return: boolean """ - cursor.execute("SELECT COUNT(1) FROM pg_indexes WHERE indexname = %s", [index_name]) + cursor.execute("SELECT COUNT(1) FROM pg_indexes WHERE indexname = %s", + [index_name]) return cursor.fetchone()[0] > 0 def remove_uuid_id_like_indexes(app_name, database=None): """ - Remove all of varchar_pattern_ops indexes that django created for uuid columns. + Remove all of varchar_pattern_ops indexes that django created for uuid + columns. A search is never done with a filter of the style (uuid__like='1ae3c%'), so all such indexes can be removed from Versionable models. This will only try to remove indexes if they exist in the database, so it should be safe to run in a post_migrate signal handler. Running it several times should leave the database in the same state as running it once. - :param str app_name: application name whose Versionable models will be acted on. - :param str database: database alias to use. If None, use default connection. + :param str app_name: application name whose Versionable models will be + acted on. + :param str database: database alias to use. If None, use default + connection. :return: number of indexes removed :rtype: int """ @@ -44,11 +50,12 @@ def remove_uuid_id_like_indexes(app_name, database=None): def get_uuid_like_indexes_on_table(model): """ - Gets a list of database index names for the given model for the uuid-containing - fields that have had a like-index created on them. + Gets a list of database index names for the given model for the + uuid-containing fields that have had a like-index created on them. 
:param model: Django model - :return: list of database rows; the first field of each row is an index name + :return: list of database rows; the first field of each row is an index + name """ with default_connection.cursor() as c: indexes = select_uuid_like_indexes_on_table(model, c) @@ -57,16 +64,19 @@ def get_uuid_like_indexes_on_table(model): def select_uuid_like_indexes_on_table(model, cursor): """ - Gets a list of database index names for the given model for the uuid-containing - fields that have had a like-index created on them. + Gets a list of database index names for the given model for the + uuid-containing fields that have had a like-index created on them. :param model: Django model :param cursor: database connection cursor - :return: list of database rows; the first field of each row is an index name + :return: list of database rows; the first field of each row is an index + name """ - # VersionedForeignKey fields as well as the id fields have these useless like indexes - field_names = ["'%s'" % f.column for f in model._meta.fields if isinstance(f, VersionedForeignKey)] + # VersionedForeignKey fields as well as the id fields have these useless + # like indexes + field_names = ["'%s'" % f.column for f in model._meta.fields if + isinstance(f, VersionedForeignKey)] field_names.append("'id'") sql = """ select i.relname as index_name @@ -94,11 +104,15 @@ def create_current_version_unique_indexes(app_name, database=None): does not support. The unique indexes are defined so that no two *current* versions can have the same value. - This will only try to create indexes if they do not exist in the database, so it - should be safe to run in a post_migrate signal handler. Running it several - times should leave the database in the same state as running it once. - :param str app_name: application name whose Versionable models will be acted on. - :param str database: database alias to use. If None, use default connection. + This will only try to create indexes if they do not exist in the database, + so it should be safe to run in a post_migrate signal handler. Running it + several times should leave the database in the same state as running it + once. + + :param str app_name: application name whose Versionable models will be + acted on. + :param str database: database alias to use. If None, use default + connection. :return: number of partial unique indexes created :rtype: int """ @@ -119,25 +133,34 @@ def create_current_version_unique_indexes(app_name, database=None): column = model._meta.get_field(field).column col_prefixes.append(column[0:3]) columns.append(column) - index_name = '%s_%s_%s_v_uniq' % (app_name, table_name, '_'.join(col_prefixes)) + index_name = '%s_%s_%s_v_uniq' % ( + app_name, table_name, '_'.join(col_prefixes)) if not index_exists(cursor, index_name): - cursor.execute("CREATE UNIQUE INDEX %s ON %s(%s) WHERE version_end_date IS NULL" - % (index_name, table_name, ','.join(columns))) + cursor.execute( + "CREATE UNIQUE INDEX %s ON %s(%s) " + "WHERE version_end_date IS NULL" + % (index_name, table_name, ','.join(columns))) indexes_created += 1 return indexes_created + def create_current_version_unique_identity_indexes(app_name, database=None): """ - Add partial unique indexes for the the identity column of versionable models. + Add partial unique indexes for the the identity column of versionable + models. This enforces that no two *current* versions can have the same identity. 
- This will only try to create indexes if they do not exist in the database, so it - should be safe to run in a post_migrate signal handler. Running it several - times should leave the database in the same state as running it once. - :param str app_name: application name whose Versionable models will be acted on. - :param str database: database alias to use. If None, use default connection. + This will only try to create indexes if they do not exist in the database, + so it should be safe to run in a post_migrate signal handler. Running it + several times should leave the database in the same state as running it + once. + + :param str app_name: application name whose Versionable models will be + acted on. + :param str database: database alias to use. If None, use default + connection. :return: number of partial unique indexes created :rtype: int """ @@ -150,8 +173,10 @@ def create_current_version_unique_identity_indexes(app_name, database=None): table_name = model._meta.db_table index_name = '%s_%s_identity_v_uniq' % (app_name, table_name) if not index_exists(cursor, index_name): - cursor.execute("CREATE UNIQUE INDEX %s ON %s(%s) WHERE version_end_date IS NULL" - % (index_name, table_name, 'identity')) + cursor.execute( + "CREATE UNIQUE INDEX %s ON %s(%s) " + "WHERE version_end_date IS NULL" + % (index_name, table_name, 'identity')) indexes_created += 1 return indexes_created diff --git a/versions_tests/__init__.py b/versions_tests/__init__.py index 2012501..9dac886 100644 --- a/versions_tests/__init__.py +++ b/versions_tests/__init__.py @@ -1 +1 @@ -default_app_config = 'versions_tests.apps.VersionsTestsConfig' \ No newline at end of file +default_app_config = 'versions_tests.apps.VersionsTestsConfig' diff --git a/versions_tests/admin.py b/versions_tests/admin.py index ed8b776..38bd4de 100644 --- a/versions_tests/admin.py +++ b/versions_tests/admin.py @@ -1,10 +1,12 @@ from django.contrib import admin from versions.admin import VersionedAdmin -from versions_tests.models import City, Student, Observer, Professor, Subject, Teacher, Team, Player, Award, ChainStore, \ +from versions_tests.models import City, Student, Observer, Professor, \ + Subject, Teacher, Team, Player, Award, ChainStore, \ Classroom, WineDrinker, WineDrinkerHat, Wine admin.site.register( - [City, Student, Subject, Teacher, Team, Player, Award, Observer, ChainStore, Professor, Classroom, + [City, Student, Subject, Teacher, Team, Player, Award, Observer, + ChainStore, Professor, Classroom, WineDrinker], VersionedAdmin) admin.site.register([Wine, WineDrinkerHat], admin.ModelAdmin) diff --git a/versions_tests/apps.py b/versions_tests/apps.py index 56618e0..6d30d64 100644 --- a/versions_tests/apps.py +++ b/versions_tests/apps.py @@ -1,6 +1,7 @@ from django.apps import AppConfig -from django.db.models.signals import post_migrate from django.db import connection +from django.db.models.signals import post_migrate + def index_adjustments(sender, using=None, **kwargs): """ @@ -19,6 +20,7 @@ def index_adjustments(sender, using=None, **kwargs): create_current_version_unique_indexes(sender.name, using) create_current_version_unique_identity_indexes(sender.name, using) + class VersionsTestsConfig(AppConfig): name = 'versions_tests' verbose_name = "Versions Tests default application configuration" @@ -33,4 +35,4 @@ def ready(self): :return: None """ if connection.vendor == 'postgresql': - post_migrate.connect(index_adjustments, sender=self) \ No newline at end of file + post_migrate.connect(index_adjustments, sender=self) diff --git 
a/versions_tests/models.py b/versions_tests/models.py index 0bbbc47..f681ff0 100644 --- a/versions_tests/models.py +++ b/versions_tests/models.py @@ -1,16 +1,18 @@ from django.db.models import CharField, IntegerField, Model, ForeignKey from django.db.models.deletion import DO_NOTHING, PROTECT, SET, SET_NULL -from django.db.models.fields.related import ManyToManyField from django.utils.encoding import python_2_unicode_compatible -from versions.models import Versionable from versions.fields import VersionedManyToManyField, VersionedForeignKey +from versions.models import Versionable def versionable_description(obj): return "<" + str(obj.__class__.__name__) + " object: " + \ - obj.name + " {valid: [" + obj.version_start_date.isoformat() + " | " + ( - obj.version_end_date.isoformat() if obj.version_end_date else "None") + "], created: " + obj.version_birth_date.isoformat() + "}>" + obj.name + " {valid: [" + obj.version_start_date.isoformat() + \ + " | " + \ + (obj.version_end_date.isoformat() + if obj.version_end_date else "None") + \ + "], created: " + obj.version_birth_date.isoformat() + "}>" ############################################ @@ -149,8 +151,10 @@ class Student(Versionable): class Pupil(Versionable): name = CharField(max_length=200) phone_number = CharField(max_length=200) - language_teachers = VersionedManyToManyField('Teacher', related_name='language_students') - science_teachers = VersionedManyToManyField('Teacher', related_name='science_students') + language_teachers = VersionedManyToManyField( + 'Teacher', related_name='language_students') + science_teachers = VersionedManyToManyField( + 'Teacher', related_name='science_students') __str__ = versionable_description @@ -216,10 +220,12 @@ class ChainStore(Versionable): door_color = VersionedForeignKey('Color', related_name='cs') # There are lots of these chain stores. They follow these rules: - # - only one store with the same name and subchain_id can exist in a single city + # - only one store with the same name and subchain_id can exist in a + # single city # - no two stores can share the same door_frame_color and door_color # Yea, well, they want to appeal to people who want to be different. 
- VERSION_UNIQUE = [['subchain_id', 'city', 'name'], ['door_frame_color', 'door_color']] + VERSION_UNIQUE = [['subchain_id', 'city', 'name'], + ['door_frame_color', 'door_color']] class Color(Versionable): @@ -267,4 +273,5 @@ def __str__(self): # SelfReferencingManyToManyTest models class Person(Versionable): name = CharField(max_length=200) - children = VersionedManyToManyField('self', symmetrical=False, related_name='parents') + children = VersionedManyToManyField('self', symmetrical=False, + related_name='parents') diff --git a/versions_tests/tests/test_models.py b/versions_tests/tests/test_models.py index 0ebeec2..a33464f 100644 --- a/versions_tests/tests/test_models.py +++ b/versions_tests/tests/test_models.py @@ -15,26 +15,30 @@ from __future__ import unicode_literals import datetime -from time import sleep import itertools -from unittest import skip, skipUnless import re import uuid +from time import sleep +from unittest import skip, skipUnless from django import get_version -from django.core.exceptions import SuspiciousOperation, ObjectDoesNotExist, ValidationError +from django.core.exceptions import SuspiciousOperation, ObjectDoesNotExist, \ + ValidationError from django.db import connection, IntegrityError, transaction from django.db.models import Q, Count, Prefetch, Sum from django.db.models.deletion import ProtectedError from django.test import TestCase -from django.utils.timezone import utc from django.utils import six +from django.utils.timezone import utc from versions.exceptions import DeletionOfNonCurrentVersionError -from versions.models import get_utc_now, ForeignKeyRequiresValueError, Versionable +from versions.models import get_utc_now, ForeignKeyRequiresValueError, \ + Versionable from versions_tests.models import ( - Award, B, C1, C2, C3, City, Classroom, Directory, Fan, Mascot, NonFan, Observer, Person, Player, Professor, Pupil, - RabidFan, Student, Subject, Teacher, Team, Wine, WineDrinker, WineDrinkerHat, WizardFan + Award, B, C1, C2, C3, City, Classroom, Directory, Fan, Mascot, NonFan, + Observer, Person, Player, Professor, Pupil, + RabidFan, Student, Subject, Teacher, Team, Wine, WineDrinker, + WineDrinkerHat, WizardFan ) @@ -54,8 +58,9 @@ def set_up_one_object_with_3_versions(): sleep(0.001) t1 = get_utc_now() - # 1ms sleeps are required, since sqlite has a 1ms precision in its datetime stamps - # not inserting the sleep would make t1 point to the next version's start date, which would be wrong + # 1ms sleeps are required, since sqlite has a 1ms precision in its + # datetime stamps not inserting the sleep would make t1 point to the next + # version's start date, which would be wrong sleep(0.001) b = b.clone() @@ -64,7 +69,8 @@ def set_up_one_object_with_3_versions(): sleep(0.001) t2 = get_utc_now() - # 1ms sleeps are required, since sqlite has a 1ms precision in its datetime stamps + # 1ms sleeps are required, since sqlite has a 1ms precision in its + # datetime stamps sleep(0.001) b = b.clone() @@ -76,10 +82,11 @@ def set_up_one_object_with_3_versions(): return b, t1, t2, t3 + def create_three_current_objects(): - b1 = B.objects.create(name = '1') - b2 = B.objects.create(name = '2') - b3 = B.objects.create(name = '3') + b1 = B.objects.create(name='1') + b2 = B.objects.create(name='2') + b3 = B.objects.create(name='3') return b1, b2, b3 @@ -94,7 +101,8 @@ def assertStringEqualIgnoreWhiteSpaces(self, expected, obtained): TestCase.remove_white_spaces = remove_white_spaces -TestCase.assertStringEqualIgnoreWhiteSpaces = assertStringEqualIgnoreWhiteSpaces 
+TestCase.assertStringEqualIgnoreWhiteSpaces = \ + assertStringEqualIgnoreWhiteSpaces class CreationTest(TestCase): @@ -185,19 +193,25 @@ def test_deleteing_non_current_version_with_queryset(self): B.objects.all().filter(pk__in=pks).delete() - # None of the objects should have been deleted, because they are not current. + # None of the objects should have been deleted, because they are not + # current. self.assertEqual(2, B.objects.all().filter(pk__in=pks).count()) def test_delete_related_with_non_versionable(self): jackie = WineDrinker.objects.create(name='Jackie') - red_sailor_hat = WineDrinkerHat.objects.create(shape='Sailor', color='red', wearer=jackie) + red_sailor_hat = WineDrinkerHat.objects.create(shape='Sailor', + color='red', + wearer=jackie) jackie.delete() self.assertEqual(WineDrinkerHat.objects.count(), 0) self.assertEqual(WineDrinker.objects.current.count(), 0) class DeletionHandlerTest(TestCase): - """Tests that the ForeignKey on_delete parameters have the expected effects""" + """ + Tests that the ForeignKey on_delete parameters have the expected + effects + """ def setUp(self): self.city = City.objects.create(name='c.v1') @@ -226,24 +240,31 @@ def test_on_delete(self): award_qs = Award.objects.current.filter(pk=self.a1.pk)[0] self.assertEqual(1, Team.objects.current.filter(**team_filter).count()) - self.assertEqual(2, Player.objects.current.filter(**player_filter).count()) + self.assertEqual(2, Player.objects.current.filter( + **player_filter).count()) self.assertEqual(2, award_qs.players.count()) - self.assertEqual(2, Mascot.objects.current.filter(**mascot_filter).count()) + self.assertEqual(2, Mascot.objects.current.filter( + **mascot_filter).count()) self.assertEqual(3, Fan.objects.current.filter(**fan_filter).count()) - self.assertEqual(1, RabidFan.objects.current.filter(**rabid_fan_filter).count()) - self.assertEqual(1, NonFan.objects.current.filter(**non_fan_filter).count()) + self.assertEqual(1, RabidFan.objects.current.filter( + **rabid_fan_filter).count()) + self.assertEqual(1, NonFan.objects.current.filter( + **non_fan_filter).count()) self.city.delete() # Cascading deletes are the default behaviour. self.assertEqual(0, Team.objects.current.filter(**team_filter).count()) - self.assertEqual(0, Player.objects.current.filter(**player_filter).count()) - self.assertEqual(0, Mascot.objects.current.filter(**mascot_filter).count()) + self.assertEqual(0, Player.objects.current.filter( + **player_filter).count()) + self.assertEqual(0, Mascot.objects.current.filter( + **mascot_filter).count()) # Many-to-Many relationships are terminated. self.assertEqual(0, award_qs.players.count()) # But a record of them still exists. - self.assertEqual(2, Award.objects.as_of(t1).get(pk=self.a1.pk).players.count()) + self.assertEqual(2, Award.objects.as_of(t1).get( + pk=self.a1.pk).players.count()) # The fans picked another team (on_delete=SET(default_team)) fans = Fan.objects.current.filter(**fan_filter).all() @@ -251,25 +272,34 @@ def test_on_delete(self): fans_teams = {f.team for f in fans} self.assertEqual({self.default_team}, fans_teams) - # The rabid fan doesn't go away if he loses his team, he's still rabid, he just - # doesn't have a team anymore. (on_delete=SET_NULL) - self.assertEqual(1, RabidFan.objects.current.filter(**rabid_fan_filter).count()) + # The rabid fan doesn't go away if he loses his team, he's still rabid, + # he just doesn't have a team anymore. 
(on_delete=SET_NULL) + self.assertEqual(1, RabidFan.objects.current.filter( + **rabid_fan_filter).count()) rabid_fan = RabidFan.objects.current.filter(**rabid_fan_filter)[0] self.assertEqual(None, rabid_fan.team) - self.assertEqual(self.team.identity, RabidFan.objects.previous_version(rabid_fan).team_id) + self.assertEqual(self.team.identity, + RabidFan.objects.previous_version(rabid_fan).team_id) # The non-fan isn't affected (on_delete=DO_NOTHING) - self.assertEqual(1, NonFan.objects.current.filter(**non_fan_filter).count()) - # This leaves a reference to the deleted team ... hey, that's what DO_NOTHING means. - self.assertEqual(self.team.pk, NonFan.objects.current.filter(**non_fan_filter)[0].team_id) + self.assertEqual(1, NonFan.objects.current.filter( + **non_fan_filter).count()) + # This leaves a reference to the deleted team ... hey, that's what + # DO_NOTHING means. + self.assertEqual(self.team.pk, + NonFan.objects.current.filter(**non_fan_filter)[ + 0].team_id) def test_protected_delete(self): WizardFan.objects.create(name="Gandalf", team=self.team) - # The wizard does his best to protect his team and it's city. (on_delete=PROTECTED) + # The wizard does his best to protect his team and it's city. + # (on_delete=PROTECTED) with self.assertRaises(ProtectedError): self.city.delete() - self.assertEqual(1, Team.objects.current.filter(pk=self.team.pk).count()) - self.assertEqual(1, City.objects.current.filter(pk=self.city.pk).count()) + self.assertEqual(1, + Team.objects.current.filter(pk=self.team.pk).count()) + self.assertEqual(1, + City.objects.current.filter(pk=self.city.pk).count()) def test_deleting_when_m2m_history(self): through = Award._meta.get_field('players').rel.through @@ -277,13 +307,17 @@ def test_deleting_when_m2m_history(self): p1 = Player.objects.create(name="Jessie") a1.players = [p1] self.assertEqual(1, through.objects.filter(player_id=p1.pk).count()) - self.assertEqual(1, through.objects.current.filter(player_id=p1.pk).count()) + self.assertEqual(1, through.objects.current.filter( + player_id=p1.pk).count()) a1.players = [] self.assertEqual(1, through.objects.filter(player_id=p1.pk).count()) - self.assertEqual(0, through.objects.current.filter(player_id=p1.pk).count()) + self.assertEqual(0, through.objects.current.filter( + player_id=p1.pk).count()) p1.delete() self.assertEqual(1, through.objects.filter(player_id=p1.pk).count()) - self.assertEqual(0, through.objects.current.filter(player_id=p1.pk).count()) + self.assertEqual(0, through.objects.current.filter( + player_id=p1.pk).count()) + class CurrentVersionTest(TestCase): def setUp(self): @@ -354,8 +388,9 @@ def test_queryset_without_using_as_of(self): def test_queryset_using_as_of(self): """ - Creates one object having 3 versions and then tests that the as_of method - is returning the correct version when given the corresponding timestamp + Creates one object having 3 versions and then tests that the as_of + method is returning the correct version when given the corresponding + timestamp """ b, t1, t2, t3 = set_up_one_object_with_3_versions() @@ -365,23 +400,23 @@ def test_queryset_using_as_of(self): o = B.objects.as_of(t2).first() self.assertEqual('v2', o.name) - def test_queryset_using_delete(self): """ - Creates 3 objects with all current and then tests that the delete method - makes the current versions a historical version (adding a version_end_date) + Creates 3 objects with all current and then tests that the delete + method makes the current versions a historical version (adding a + version_end_date) """ 
b1, b2, b3 = create_three_current_objects() self.assertEqual(True, b1.is_current) self.assertEqual(True, b2.is_current) self.assertEqual(True, b3.is_current) - qs = B.objects.filter(name__in = ['1','2','3']).all() + qs = B.objects.filter(name__in=['1', '2', '3']).all() qs.delete() - b1 = B.objects.get(name = '1') - b2 = B.objects.get(name = '2') - b3 = B.objects.get(name = '3') + b1 = B.objects.get(name='1') + b2 = B.objects.get(name='2') + b3 = B.objects.get(name='3') self.assertEqual(False, b1.is_current) self.assertEqual(False, b2.is_current) self.assertEqual(False, b3.is_current) @@ -433,12 +468,14 @@ def test_getting_previous_version(self): def test_getting_nonexistent_next_version(self): """ - Raise an error when trying to look up the next version of the last version of a deleted object. + Raise an error when trying to look up the next version of the last + version of a deleted object. """ v3 = B.objects.as_of(self.t3).first() v3.delete() - self.assertRaises(ObjectDoesNotExist, lambda: B.objects.next_version(v3)) + self.assertRaises(ObjectDoesNotExist, + lambda: B.objects.next_version(v3)) class VersionNavigationAsOfTest(TestCase): @@ -486,8 +523,10 @@ def test_as_of_parameter(self): self.assertEqual(1, city1_t2.team_set.all().count()) self.assertFalse(city1_t2.is_current) - # as_of 'end' for current version means "current", not a certain point in time - city1_current = City.objects.next_version(city1_t2, relations_as_of='end') + # as_of 'end' for current version means "current", not a certain point + # in time + city1_current = City.objects.next_version(city1_t2, + relations_as_of='end') self.assertTrue(city1_current.is_current) self.assertIsNone(city1_current._querytime.time) teams = city1_current.team_set.all() @@ -495,15 +534,18 @@ def test_as_of_parameter(self): self.assertEqual('team1.b', teams[0].name) # as_of 'end' for non-current version means at a certain point in time - city1_previous = City.objects.previous_version(city1_current, relations_as_of='end') + city1_previous = City.objects.previous_version(city1_current, + relations_as_of='end') self.assertIsNotNone(city1_previous._querytime.time) # as_of 'start': returns version at the very start of it's life. - city1_latest_at_birth = City.objects.next_version(city1_t2, relations_as_of='start') + city1_latest_at_birth = City.objects.next_version( + city1_t2, relations_as_of='start') self.assertTrue(city1_latest_at_birth.is_current) self.assertEqual(1, city1_latest_at_birth.team_set.count()) self.assertIsNotNone(city1_latest_at_birth._querytime.time) - self.assertEqual(city1_latest_at_birth._querytime.time, city1_latest_at_birth.version_start_date) + self.assertEqual(city1_latest_at_birth._querytime.time, + city1_latest_at_birth.version_start_date) # as_of datetime: returns a version at a given point in time. city1_t4 = City.objects.next_version(city1_t2, relations_as_of=self.t4) @@ -513,9 +555,11 @@ def test_as_of_parameter(self): self.assertEqual(1, teams.count()) self.assertEqual('team1', teams[0].name) - # as_of None: returns object without time restriction for related objects. - # This means, that all other related object versions that have been associated with - # this object are returned when queried, without applying any time restriction. + # as_of None: returns object without time restriction for related + # objects. + # This means, that all other related object versions that have been + # associated with this object are returned when queried, without + # applying any time restriction. 
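Condensed overview (not part of the patch): the four relations_as_of modes this test walks through, written side by side; city_version stands for any City version and some_dt for any datetime.

    # Not part of the patch -- summary of the modes exercised in this test.
    from versions_tests.models import City

    def relations_snapshots(city_version, some_dt):
        # related sets as seen at the end of the version's validity
        # ("now" when the version is still current)
        at_end = City.objects.next_version(city_version,
                                           relations_as_of='end')
        # related sets as seen at the version's start date
        at_start = City.objects.next_version(city_version,
                                             relations_as_of='start')
        # related sets as seen at an explicit point in time
        at_time = City.objects.next_version(city_version,
                                            relations_as_of=some_dt)
        # related sets without any time restriction at all
        no_restriction = City.objects.current_version(city_version,
                                                      relations_as_of=None)
        return at_end, at_start, at_time, no_restriction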
city1_v2 = City.objects.current_version(city1_t2, relations_as_of=None) self.assertFalse(city1_v2._querytime.active) teams = city1_v2.team_set.all() @@ -582,7 +626,8 @@ def test_create_using_constructor(self): def test_wrong_temporal_moving_of_objects(self): """ - Test that the restriction about creating "past objects' are operational: + Test that the restriction about creating "past objects' are + operational: - we cannot give something else than a timestamp to at() - we cannot move anywhere in time an object """ @@ -630,13 +675,15 @@ def test_creating_new_version_of_the_team(self): # ...or the is_current property self.assertTrue(team.is_current) - # We didn't change anything to the players so there must be 2 players in - # the team at time t1... + # We didn't change anything to the players so there must be 2 players + # in the team at time t1... team_at_t1 = Team.objects.as_of(t1).first() # TODO: Remove the following (useless) line, once Django1.8 is working t1_player_queryset = team_at_t1.player_set.all() - # TODO: [django18 compat] The SQL query in t1_player_queryset.query shows that the Team pk value (team_at_t1.id) - # is used to look up the players (instead of the identity property value (team_at_t1.identity)) + # TODO: [django18 compat] The SQL query in t1_player_queryset.query + # shows that the Team pk value (team_at_t1.id) is used to look up the + # players (instead of the identity property value + # (team_at_t1.identity)) self.assertEqual(2, team_at_t1.player_set.count()) # ... and at time t2 @@ -658,35 +705,41 @@ def test_finding_object_with_historic_foreign_key(self): team_at_t2 = Team.objects.as_of(t2).get(identity=team.identity) team_current = Team.objects.current.get(identity=team.identity) - # self.p1's foreign key to self.team is it's original value, which is equal - # to team_at_t1's identity, but not (any longer) team_at_t1's id. + # self.p1's foreign key to self.team is it's original value, which is + # equal to team_at_t1's identity, but not (any longer) team_at_t1's id. # The following queries should all work to return the self.p1 Player: # Using a cross-relation lookup on a non-identity field (team__name): - player_p1_lookup = Player.objects.as_of(t1).get(team__name=team_at_t1.name, name='p1.v1') + player_p1_lookup = Player.objects.as_of(t1).get( + team__name=team_at_t1.name, name='p1.v1') self.assertEqual(self.p1, player_p1_lookup) # Explicitly specifying the identity field in the lookup: - player_p1_explicit = Player.objects.as_of(t1).get(team__identity=team_at_t1.identity, name='p1.v1') + player_p1_explicit = Player.objects.as_of(t1).get( + team__identity=team_at_t1.identity, name='p1.v1') self.assertEqual(self.p1, player_p1_explicit) - # The following three all work because the foreign key actually refers to the identity - # field of the foreign object (which equals the identity of the current object). + # The following three all work because the foreign key actually refers + # to the identity field of the foreign object (which equals the + # identity of the current object). 
# Providing the current related object to filter on: - player_p1_obj_current = Player.objects.as_of(t1).get(team=team_current, name='p1.v1') + player_p1_obj_current = Player.objects.as_of(t1).get(team=team_current, + name='p1.v1') self.assertEqual(self.p1, player_p1_obj_current) self.assertEqual(team_at_t1, player_p1_obj_current.team) # Providing the related object that existed at the as_of time: - player_p1_obj_as_of = Player.objects.as_of(t1).get(team=team_at_t1, name='p1.v1') + player_p1_obj_as_of = Player.objects.as_of(t1).get(team=team_at_t1, + name='p1.v1') self.assertEqual(self.p1, player_p1_obj_as_of) self.assertEqual(team_at_t1, player_p1_obj_as_of.team) - # Providing the related object that is neither current, nor the one that existed - # at the as_of time, but that has the same identity. - player_p1_obj_other_version = Player.objects.as_of(t1).get(team=team_at_t2, name='p1.v1') + # Providing the related object that is neither current, nor the one + # that existed at the as_of time, but that has the same identity. + player_p1_obj_other_version = Player.objects.as_of(t1).get( + team=team_at_t2, name='p1.v1') self.assertEqual(self.p1, player_p1_obj_other_version) self.assertEqual(team_at_t1, player_p1_obj_other_version.team) @@ -714,9 +767,11 @@ def test_creating_new_version_of_the_player(self): self.assertEqual(2, team.player_set.count()) if six.PY2: - matches = itertools.ifilter(lambda x: x.name == 'p1.v2', team.player_set.all()) + matches = itertools.ifilter(lambda x: x.name == 'p1.v2', + team.player_set.all()) if six.PY3: - matches = filter(lambda x: x.name == 'p1.v2', team.player_set.all()) + matches = filter(lambda x: x.name == 'p1.v2', + team.player_set.all()) self.assertEqual(1, len(list(matches))) def test_adding_one_more_player_to_the_team(self): @@ -758,30 +813,36 @@ def test_removing_and_then_adding_again_same_player(self): t3 = get_utc_now() - # there should be 2 players in the team if we put ourselves back at time t1 + # there should be 2 players in the team if we put ourselves back at + # time t1 team_at_t1 = Team.objects.as_of(t1).first() self.assertEqual(2, team_at_t1.player_set.all().count()) - # there should be 1 players in the team if we put ourselves back at time t2 + # there should be 1 players in the team if we put ourselves back at + # time t2 team_at_t2 = Team.objects.as_of(t2).first() self.assertEqual(1, team_at_t2.player_set.all().count()) p1_at_t2 = Player.objects.as_of(t2).get(name__startswith='p1') self.assertIsNone(p1_at_t2.team) - # there should be 2 players in the team if we put ourselves back at time t3 + # there should be 2 players in the team if we put ourselves back at + # time t3 team_at_t3 = Team.objects.as_of(t3).first() self.assertEqual(2, team_at_t3.player_set.all().count()) - def test_removing_and_then_adding_again_same_player_on_related_object(self): + def test_removing_and_then_adding_again_same_player_on_related_object( + self): t1 = get_utc_now() sleep(0.1) self.team.player_set.remove(self.p1) - # Remember: self.p1 was cloned while removing and is not current anymore!! - # This property has to be documented, since it's critical for developers! - # At this time, there is no mean to replace the contents of self.p1 within the - # remove method + # Remember: self.p1 was cloned while removing and is not current + # anymore!! + # This property has to be documented, since it's critical for + # developers! 
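Sketch of the pattern this comment asks to have documented (not part of the patch): after removing an object from a versioned relation, re-fetch the current version instead of reusing the instance you still hold.

    # Not part of the patch -- illustrative pattern only.
    from versions_tests.models import Player

    def remove_player_and_refetch(team, player):
        # Removing versions the related Player; the instance passed in is
        # no longer the current version afterwards.
        team.player_set.remove(player)
        # Re-fetch the current version via the stable identity.
        return Player.objects.current.get(identity=player.identity)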
+ # At this time, there is no mean to replace the contents of self.p1 + # within the remove method p1 = Player.objects.current.get(name__startswith='p1') self.assertNotEqual(p1, self.p1) p1.name = 'p1.v2' @@ -800,15 +861,18 @@ def test_removing_and_then_adding_again_same_player_on_related_object(self): t3 = get_utc_now() - # there should be 2 players in the team if we put ourselves back at time t1 + # there should be 2 players in the team if we put ourselves back at + # time t1 team_at_t1 = Team.objects.as_of(t1).first() self.assertEqual(2, team_at_t1.player_set.all().count()) - # there should be 1 players in the team if we put ourselves back at time t2 + # there should be 1 players in the team if we put ourselves back at + # time t2 team_at_t2 = Team.objects.as_of(t2).first() self.assertEqual(1, team_at_t2.player_set.all().count()) - # there should be 2 players in the team if we put ourselves back at time t3 + # there should be 2 players in the team if we put ourselves back at + # time t3 team_at_t3 = Team.objects.as_of(t3).first() self.assertEqual(2, team_at_t3.player_set.all().count()) @@ -844,13 +908,15 @@ def test_creating_new_version_of_parent_directory(self): self.assertTrue(parentdir_v2.is_current) - # We didn't change anything to the subdirs so there must be 2 subdirs in - # the parent at time t1... - parentdir_at_t1 = Directory.objects.as_of(t1).get(name__startswith='parent') + # We didn't change anything to the subdirs so there must be 2 subdirs + # in the parent at time t1... + parentdir_at_t1 = Directory.objects.as_of(t1).get( + name__startswith='parent') self.assertEqual(2, parentdir_at_t1.directory_set.count()) # ... and at time t2 - parentdir_at_t2 = Directory.objects.as_of(t2).get(name__startswith='parent') + parentdir_at_t2 = Directory.objects.as_of(t2).get( + name__startswith='parent') self.assertEqual(2, parentdir_at_t2.directory_set.count()) def test_creating_new_version_of_the_subdir(self): @@ -865,48 +931,57 @@ def test_creating_new_version_of_the_subdir(self): t2 = get_utc_now() # Count all Directory instance versions: - # 3 initial versions + 2 subdirs added to parentdir (implies a clone) + 1 subdir1 that was explicitely cloned = 6 + # 3 initial versions + 2 subdirs added to parentdir (implies a + # clone) + 1 subdir1 that was explicitely cloned = 6 self.assertEqual(6, Directory.objects.all().count()) # at t1 there is no directory named 'subdir1.v2' - parentdir_at_t1 = Directory.objects.as_of(t1).get(name__startswith='parent') + parentdir_at_t1 = Directory.objects.as_of(t1).get( + name__startswith='parent') self.assertEqual(2, parentdir_at_t1.directory_set.count()) for subdir in parentdir_at_t1.directory_set.all(): self.assertNotEqual('subdir1.v2', subdir.name) # at t2 there must be 2 directories and ... - parentdir_at_t2 = Directory.objects.as_of(t2).get(name__startswith='parent') + parentdir_at_t2 = Directory.objects.as_of(t2).get( + name__startswith='parent') self.assertEqual(2, parentdir_at_t2.directory_set.count()) # ... 
and one of then is named 'subdir1.v2' if six.PY2: - matches = itertools.ifilter(lambda x: x.name == 'subdir1.v2', parentdir_at_t2.directory_set.all()) + matches = itertools.ifilter(lambda x: x.name == 'subdir1.v2', + parentdir_at_t2.directory_set.all()) if six.PY3: - matches = filter(lambda x: x.name == 'subdir1.v2', parentdir_at_t2.directory_set.all()) + matches = filter(lambda x: x.name == 'subdir1.v2', + parentdir_at_t2.directory_set.all()) self.assertEqual(1, len(list(matches))) def test_adding_more_subdir(self): t1 = get_utc_now() sleep(0.1) - current_parentdir = Directory.objects.current.get(name__startswith='parent') + current_parentdir = Directory.objects.current.get( + name__startswith='parent') self.assertEqual(2, current_parentdir.directory_set.all().count()) sleep(0.1) Directory.objects.create(name='subdir3.v1', parent=current_parentdir) t2 = get_utc_now() - # There must be 3 subdirectories in the parent directory now. Since current_parentdir has never had an as_of - # specified, it will reflect the current state. + # There must be 3 subdirectories in the parent directory now. Since + # current_parentdir has never had an as_of specified, it will reflect + # the current state. self.assertEqual(3, current_parentdir.directory_set.all().count()) # there should be 2 directories in the parent directory at time t1 - parentdir_at_t1 = Directory.objects.as_of(t1).filter(name='parent.v1').first() + parentdir_at_t1 = Directory.objects.as_of(t1).filter( + name='parent.v1').first() self.assertEqual(2, parentdir_at_t1.directory_set.all().count()) # there should be 3 directories in the parent directory at time t2 - parentdir_at_t2 = Directory.objects.as_of(t2).filter(name='parent.v1').first() + parentdir_at_t2 = Directory.objects.as_of(t2).filter( + name='parent.v1').first() self.assertEqual(3, parentdir_at_t2.directory_set.all().count()) def test_removing_and_then_adding_again_same_subdir(self): @@ -922,7 +997,8 @@ def test_removing_and_then_adding_again_same_subdir(self): t2 = get_utc_now() sleep(0.1) - current_parentdir = Directory.objects.current.get(name__startswith='parent') + current_parentdir = Directory.objects.current.get( + name__startswith='parent') subdir1_v3 = subdir1_v2.clone() subdir1_v3.parent = current_parentdir subdir1_v3.name = 'subdir1.v3' @@ -931,15 +1007,18 @@ def test_removing_and_then_adding_again_same_subdir(self): t3 = get_utc_now() # there should be 2 directories in the parent directory at time t1 - parentdir_at_t1 = Directory.objects.as_of(t1).get(name__startswith='parent') + parentdir_at_t1 = Directory.objects.as_of(t1).get( + name__startswith='parent') self.assertEqual(2, parentdir_at_t1.directory_set.all().count()) # there should be 1 directory in the parent directory at time t2 - parentdir_at_t2 = Directory.objects.as_of(t2).get(name__startswith='parent') + parentdir_at_t2 = Directory.objects.as_of(t2).get( + name__startswith='parent') self.assertEqual(1, parentdir_at_t2.directory_set.all().count()) # there should be 2 directories in the parent directory at time t3 - parentdir_at_t3 = Directory.objects.as_of(t3).get(name__startswith='parent') + parentdir_at_t3 = Directory.objects.as_of(t3).get( + name__startswith='parent') self.assertEqual(2, parentdir_at_t3.directory_set.all().count()) @@ -1012,34 +1091,46 @@ def setUp(self): def test_filtering_on_the_other_side_of_the_relation(self): self.assertEqual(1, Team.objects.all().count()) self.assertEqual(1, Team.objects.as_of(self.t1).all().count()) - self.assertEqual(3, 
Player.objects.filter(name__startswith='p1').all().count()) - self.assertEqual(3, Player.objects.filter(name__startswith='p2').all().count()) - self.assertEqual(1, Player.objects.as_of(self.t1).filter(name='p1.v1').all().count()) - self.assertEqual(1, Player.objects.as_of(self.t1).filter(name='p2.v1').all().count()) + self.assertEqual(3, Player.objects.filter( + name__startswith='p1').all().count()) + self.assertEqual(3, Player.objects.filter( + name__startswith='p2').all().count()) + self.assertEqual(1, Player.objects.as_of(self.t1).filter( + name='p1.v1').all().count()) + self.assertEqual(1, Player.objects.as_of(self.t1).filter( + name='p2.v1').all().count()) # at t1 there should be one team with two players - team_p1 = Team.objects.as_of(self.t1).filter(player__name='p1.v1').first() + team_p1 = Team.objects.as_of(self.t1).filter( + player__name='p1.v1').first() self.assertIsNotNone(team_p1) - team_p2 = Team.objects.as_of(self.t1).filter(player__name='p2.v1').first() + team_p2 = Team.objects.as_of(self.t1).filter( + player__name='p2.v1').first() self.assertIsNotNone(team_p2) # at t2 there should be one team with one single player called 'p1.v1' - team_p1 = Team.objects.as_of(self.t2).filter(player__name='p1.v1').first() - team_p2 = Team.objects.as_of(self.t2).filter(player__name='p2.v2').first() + team_p1 = Team.objects.as_of(self.t2).filter( + player__name='p1.v1').first() + team_p2 = Team.objects.as_of(self.t2).filter( + player__name='p2.v2').first() self.assertIsNotNone(team_p1) self.assertEqual(team_p1.name, 't.v1') self.assertEqual(1, team_p1.player_set.count()) self.assertIsNone(team_p2) # at t3 there should be one team with no players - team_p1 = Team.objects.as_of(self.t3).filter(player__name='p1.v2').first() - team_p2 = Team.objects.as_of(self.t3).filter(player__name='p2.v2').first() + team_p1 = Team.objects.as_of(self.t3).filter( + player__name='p1.v2').first() + team_p2 = Team.objects.as_of(self.t3).filter( + player__name='p2.v2').first() self.assertIsNone(team_p1) self.assertIsNone(team_p2) # at t4 there should be one team with two players again! - team_p1 = Team.objects.as_of(self.t4).filter(player__name='p1.v3').first() - team_p2 = Team.objects.as_of(self.t4).filter(player__name='p2.v3').first() + team_p1 = Team.objects.as_of(self.t4).filter( + player__name='p1.v3').first() + team_p2 = Team.objects.as_of(self.t4).filter( + player__name='p2.v3').first() self.assertIsNotNone(team_p1) self.assertEqual(team_p1.name, 't.v1') self.assertIsNotNone(team_p2) @@ -1049,24 +1140,29 @@ def test_filtering_on_the_other_side_of_the_relation(self): def test_simple_filter_using_q_objects(self): """ - This tests explicitely the filtering of a versioned object using Q objects. - However, since this is done implicetly with every call to 'as_of', this test is redundant but is kept for - explicit test coverage + This tests explicitely the filtering of a versioned object using Q + objects. 
+ However, since this is done implicetly with every call to 'as_of', + this test is redundant but is kept for explicit test coverage """ t1_players = list( - Player.objects.as_of(self.t1).filter(Q(name__startswith='p1') | Q(name__startswith='p2')).values_list( + Player.objects.as_of(self.t1).filter(Q(name__startswith='p1') | Q( + name__startswith='p2')).values_list( 'name', flat=True)) self.assertEqual(2, len(t1_players)) self.assertListEqual(sorted(t1_players), sorted(['p1.v1', 'p2.v1'])) def test_filtering_for_deleted_player_at_t5(self): - team_none = Team.objects.as_of(self.t5).filter(player__name__startswith='p1').first() + team_none = Team.objects.as_of(self.t5).filter( + player__name__startswith='p1').first() self.assertIsNone(team_none) - @skipUnless(connection.vendor == 'sqlite', 'SQL is database specific, only sqlite is tested here.') + @skipUnless(connection.vendor == 'sqlite', + 'SQL is database specific, only sqlite is tested here.') def test_query_created_by_filtering_for_deleted_player_at_t5(self): - team_none_queryset = Team.objects.as_of(self.t5).filter(player__name__startswith='p1') + team_none_queryset = Team.objects.as_of(self.t5).filter( + player__name__startswith='p1') # Validating the current query prior to analyzing the generated SQL self.assertEqual([], list(team_none_queryset)) team_none_query = str(team_none_queryset.query) @@ -1105,13 +1201,16 @@ def test_query_created_by_filtering_for_deleted_player_at_t5(self): ) AND "{team_table}"."version_start_date" <= {ts_wo_tz} ) - """.format(ts=t5_utc_w_tz, ts_wo_tz=t5_utc_wo_tz, team_table=team_table, player_table=player_table) - self.assertStringEqualIgnoreWhiteSpaces(expected_query, team_none_query) + """.format(ts=t5_utc_w_tz, ts_wo_tz=t5_utc_wo_tz, + team_table=team_table, player_table=player_table) + self.assertStringEqualIgnoreWhiteSpaces(expected_query, + team_none_query) class MultiM2MTest(TestCase): """ - Testing multiple ManyToMany-relationships on a same class; the following story was chosen: + Testing multiple ManyToMany-relationships on a same class; the following + story was chosen: Classroom <--> Student <--> Professor @@ -1120,13 +1219,19 @@ class MultiM2MTest(TestCase): def setUp(self): # -------------- t0: - mr_biggs = Professor.objects.create(name='Mr. Biggs', address='123 Mainstreet, Somewhere', - phone_number='123') - ms_piggy = Professor.objects.create(name='Ms. Piggy', address='82 Leicester Street, London', - phone_number='987') - - gym = Classroom.objects.create(name='Sports room', building='The big one over there') - phylo = Classroom.objects.create(name='Philosophy lectures', building='The old one') + mr_biggs = Professor.objects.create( + name='Mr. Biggs', + address='123 Mainstreet, Somewhere', + phone_number='123') + ms_piggy = Professor.objects.create( + name='Ms. Piggy', + address='82 Leicester Street, London', + phone_number='987') + + gym = Classroom.objects.create(name='Sports room', + building='The big one over there') + phylo = Classroom.objects.create(name='Philosophy lectures', + building='The old one') annika = Student.objects.create(name='Annika') annika.professors.add(mr_biggs) @@ -1151,11 +1256,13 @@ def setUp(self): mr_biggs.save() # Mr. Evans gets hired - mr_evans = Professor.objects.create(name='Mr. Evans', address='lives in a camper', + mr_evans = Professor.objects.create(name='Mr. 
Evans', + address='lives in a camper', phone_number='456') # A lab gets built - lab = Classroom.objects.create(name='Physics and stuff', building='The old one') + lab = Classroom.objects.create(name='Physics and stuff', + building='The old one') self.t1 = get_utc_now() sleep(0.1) @@ -1216,7 +1323,8 @@ def test_t0(self): for student in gym_t0.students.all(): self.assertIn(student.name, ['Annika', 'Benny']) - female_professors_t0 = Classroom.objects.as_of(self.t0).get(name__startswith='Philo'). \ + female_professors_t0 = Classroom.objects.as_of(self.t0).get( + name__startswith='Philo'). \ students.first(). \ professors.filter(name__startswith='Ms') self.assertEqual(len(female_professors_t0), 1) @@ -1229,10 +1337,12 @@ def test_t1(self): self.assertEqual(mr_evans_t1.students.count(), 0) self.assertEqual(list(mr_evans_t1.students.all()), []) - self.assertEqual(Classroom.objects.as_of(self.t1).get(name__startswith="Physics").students.count(), + self.assertEqual(Classroom.objects.as_of(self.t1).get( + name__startswith="Physics").students.count(), 0) - self.assertEqual(Professor.objects.as_of(self.t1).get(name__contains='Biggs').address, + self.assertEqual(Professor.objects.as_of(self.t1).get( + name__contains='Biggs').address, 'Thunplatz, Bern') def test_t2(self): @@ -1241,25 +1351,30 @@ def test_t2(self): self.assertEqual(len(evans_students), 1) self.assertEqual(evans_students[0].name, 'Sophie') # Checking Sophie's rooms - self.assertIn('Physics and stuff', list(evans_students[0].classrooms.values_list('name', flat=True))) + self.assertIn('Physics and stuff', list( + evans_students[0].classrooms.values_list('name', flat=True))) self.assertEqual(evans_students[0].classrooms.count(), 1) def test_t3(self): # Find all professors who teach Annika - annikas_professors_t3 = Professor.objects.as_of(self.t3).filter(students__name='Annika') + annikas_professors_t3 = Professor.objects.as_of(self.t3).filter( + students__name='Annika') self.assertEqual(annikas_professors_t3.count(), 3) - self.assertIn('Mr. Evans', list(annikas_professors_t3.values_list('name', flat=True))) + self.assertIn('Mr. Evans', list( + annikas_professors_t3.values_list('name', flat=True))) def test_number_of_queries_stay_constant(self): """ - We had a situation where the number of queries to get data from a m2m relations - was proportional to the number of objects in the relations. For example if one - object was related with 10 others it will require 2 + 2x10 queries to get data. - Obviously this is not something one would want and this problem is really - difficult to find out as the behavior is correct. There is just too many queries - generated to carry on the work and therefore the system's performance sinks. - This test is here to make sure we don't go back accidentally to such a situation - by making sure the number of queries stays the same. + We had a situation where the number of queries to get data from a m2m + relations was proportional to the number of objects in the relations. + For example if one object was related with 10 others it will require + 2 + 2x10 queries to get data. + Obviously this is not something one would want and this problem is + really difficult to find out as the behavior is correct. There is just + too many queries generated to carry on the work and therefore the + system's performance sinks. + This test is here to make sure we don't go back accidentally to such a + situation by making sure the number of queries stays the same. 
""" annika = Student.objects.current.get(name='Annika') with self.assertNumQueries(1): @@ -1271,20 +1386,24 @@ def test_adding_multiple_related_objects(self): benny = Student.objects.current.get(name='Benny') benny.professors.add(*all_professors) benny.as_of = get_utc_now() - # This was once failing because _add_items() was filtering out items it didn't need to re-add, - # but it was not restricting the query to find those objects with any as-of time. - self.assertSetEqual(set(list(benny.professors.all())), set(all_professors)) + # This was once failing because _add_items() was filtering out items + # it didn't need to re-add, but it was not restricting the query to + # find those objects with any as-of time. + self.assertSetEqual(set(list(benny.professors.all())), + set(all_professors)) def test_adding_multiple_related_objects_using_a_valid_timestamp(self): all_professors = list(Professor.objects.current.all()) benny = Student.objects.current.get(name='Benny') benny.professors.add_at(self.t4, *all_professors) # Test the addition of objects in the past - self.assertSetEqual(set(list(benny.professors.all())), set(all_professors)) + self.assertSetEqual(set(list(benny.professors.all())), + set(all_professors)) @skip("To be implemented") def test_adding_multiple_related_objects_using_an_invalid_timestamp(self): - # TODO: See test_adding_multiple_related_objects and make use of add_at and a timestamp laying outside the + # TODO: See test_adding_multiple_related_objects and make use of + # add_at and a timestamp laying outside the # current object's lifetime # Create a new version beyond self.t4 @@ -1294,24 +1413,31 @@ def test_adding_multiple_related_objects_using_an_invalid_timestamp(self): benny.save() all_professors = list(Professor.objects.current.all()) - # Test the addition of objects in the past with a timestamp that points before the current - # versions lifetime - # TODO: Raise an error when adding objects outside the lifetime of an object (even if it's a discouraged use case) - self.assertRaises(ValueError, lambda: benny.professors.add_at(self.t4, *all_professors)) + # Test the addition of objects in the past with a timestamp that + # points before the current versions lifetime + # TODO: Raise an error when adding objects outside the lifetime of an + # object (even if it's a discouraged use case) + self.assertRaises(ValueError, + lambda: benny.professors.add_at( + self.t4, + *all_professors)) def test_querying_multiple_related_objects_on_added_object(self): # In the setUp, Benny had a professor, and then no more. all_professors = list(Professor.objects.current.all()) benny = Student.objects.current.get(name='Benny') benny.professors.add(*all_professors) - # This was once failing because benny's as_of time had been set by the call to Student.objects.current, - # and was being propagated to the query selecting the relations, which were added after as_of was set. - self.assertSetEqual(set(list(benny.professors.all())), set(all_professors)) + # This was once failing because benny's as_of time had been set by the + # call to Student.objects.current, + # and was being propagated to the query selecting the relations, which + # were added after as_of was set. + self.assertSetEqual(set(list(benny.professors.all())), + set(all_professors)) def test_direct_assignment_of_relations(self): """ - Ensure that when relations that are directly set (e.g. not via add() or remove(), - that their versioning information is kept. + Ensure that when relations that are directly set (e.g. 
not via add() + or remove(), that their versioning information is kept. """ benny = Student.objects.current.get(name='Benny') all_professors = list(Professor.objects.current.all()) @@ -1348,40 +1474,52 @@ def test_direct_assignment_of_relations(self): benny5 = Student.objects.as_of(t5).get(identity=benny.identity) self.assertSetEqual(set(list(benny0.professors.all())), set()) - self.assertSetEqual(set(list(benny1.professors.all())), set([first_professor])) - self.assertSetEqual(set(list(benny2.professors.all())), set(all_professors)) - self.assertSetEqual(set(list(benny3.professors.all())), set([last_professor])) - self.assertSetEqual(set([o.pk for o in benny4.professors.all()]), set(some_professor_ids)) + self.assertSetEqual(set(list(benny1.professors.all())), + set([first_professor])) + self.assertSetEqual(set(list(benny2.professors.all())), + set(all_professors)) + self.assertSetEqual(set(list(benny3.professors.all())), + set([last_professor])) + self.assertSetEqual(set([o.pk for o in benny4.professors.all()]), + set(some_professor_ids)) self.assertSetEqual(set(list(benny5.professors.all())), set()) def test_annotations_and_aggregations(self): - # Annotations and aggreagations should work with .current objects as well as historical .as_of() objects. + # Annotations and aggreagations should work with .current objects as + # well as historical .as_of() objects. self.assertEqual(4, - Professor.objects.current.annotate(num_students=Count('students')).aggregate( + Professor.objects.current.annotate( + num_students=Count('students')).aggregate( sum=Sum('num_students'))['sum'] - ) + ) self.assertTupleEqual((1, 1), - (Professor.objects.current.annotate(num_students=Count('students')).get( + (Professor.objects.current.annotate( + num_students=Count('students')).get( name='Mr. Biggs').num_students, - Professor.objects.current.get(name='Mr. Biggs').students.count()) - ) + Professor.objects.current.get( + name='Mr. Biggs').students.count()) + ) self.assertTupleEqual((2, 2), - (Professor.objects.as_of(self.t1).annotate(num_students=Count('students')).get( + (Professor.objects.as_of(self.t1).annotate( + num_students=Count('students')).get( name='Mr. Biggs').num_students, - Professor.objects.as_of(self.t1).get(name='Mr. Biggs').students.count()) - ) + Professor.objects.as_of(self.t1).get( + name='Mr. Biggs').students.count()) + ) - # Results should include records for which the annotation returns a 0 count, too. - # This requires that the generated LEFT OUTER JOIN condition includes a clause - # to restrict the records according to the desired as_of time. - self.assertEqual(3, len(Student.objects.current.annotate(num_teachers=Count('professors')).all())) + # Results should include records for which the annotation returns a 0 + # count, too. + # This requires that the generated LEFT OUTER JOIN condition includes + # a clause to restrict the records according to the desired as_of time. + self.assertEqual(3, len(Student.objects.current.annotate( + num_teachers=Count('professors')).all())) def test_constant_number_of_queries_when_cloning_m2m_related_object(self): """ - This test aims to verify whether the number of queries against the DB remains constant, - even if the number of M2M relations has grown. + This test aims to verify whether the number of queries against the DB + remains constant, even if the number of M2M relations has grown. 
This test was necessary in order to verify changes from PR #44 """ annika = Student.objects.current.get(name='Annika') @@ -1396,14 +1534,16 @@ def test_constant_number_of_queries_when_cloning_m2m_related_object(self): # o 1 update of the later version # - 5 for the professors relationship # o 1 for selecting all concerned professor objects - # o 1 for selecting all concerned intermediate table entries (student_professor) + # o 1 for selecting all concerned intermediate table entries + # (student_professor) # o 1 for updating current intermediate entry versions # o 1 for non-current rel-entries pointing the annika-object # (there's 1 originating from the clone-operation on mr_biggs) # o 1 for inserting new versions # - 4 for the classrooms M2M relationship # o 1 for selecting all concerned classroom objects - # o 1 for selecting all concerned intermediate table entries (student_classroom) + # o 1 for selecting all concerned intermediate table entries + # (student_classroom) # o 1 for updating current intermediate entry versions # o 0 for non-current rel-entries pointing the annika-object # o 1 for inserting new versions @@ -1412,8 +1552,8 @@ def test_constant_number_of_queries_when_cloning_m2m_related_object(self): def test_no_duplicate_m2m_entries_after_cloning_related_object(self): """ - This test ensures there are no duplicate entries added when cloning an object participating - in a M2M relationship. + This test ensures there are no duplicate entries added when cloning an + object participating in a M2M relationship. It ensures the absence of duplicate entries on all modified levels: - at the object-model level - at any relationship level (intermediary tables) @@ -1428,14 +1568,20 @@ def test_no_duplicate_m2m_entries_after_cloning_related_object(self): # Check the PRE-CLONE state annika_pre_clone = annika # There's 1 Student instance (named Annika) - self.assertEqual(1, Student.objects.filter(identity=annika.identity).count()) - # There are 4 links to 3 professors (Mr. Biggs has been cloned once when setting up, thus 1 additional link) - student_professor_links = list(student_professors_mgr.through.objects.filter( - **{student_professors_mgr.source_field_name: annika_pre_clone.id})) + self.assertEqual(1, Student.objects.filter( + identity=annika.identity).count()) + # There are 4 links to 3 professors (Mr. Biggs has been cloned once + # when setting up, thus 1 additional link) + student_professor_links = list( + student_professors_mgr.through.objects.filter( + **{student_professors_mgr.source_field_name: + annika_pre_clone.id})) self.assertEqual(4, len(student_professor_links)) # There are 3 links to classrooms - student_classroom_links = list(student_classrooms_mgr.through.objects.filter( - **{student_classrooms_mgr.source_field_name: annika_pre_clone.id})) + student_classroom_links = list( + student_classrooms_mgr.through.objects.filter( + **{student_classrooms_mgr.source_field_name: + annika_pre_clone.id})) self.assertEqual(3, len(student_classroom_links)) # Do the CLONE that also impacts the number of linking entries @@ -1443,36 +1589,50 @@ def test_no_duplicate_m2m_entries_after_cloning_related_object(self): # Check the POST-CLONE state # There are 2 Student instances (named Annika) - self.assertEqual(2, Student.objects.filter(identity=annika.identity).count()) + self.assertEqual(2, Student.objects.filter( + identity=annika.identity).count()) # There are 7 links to 3 professors - # - 4 of them are pointing the previous annika-object (including the non-current link to Mr. 
Biggs) - # - 3 of them are pointing the current annika-object (only current links were taken over) - student_professor_links = list(student_professors_mgr.through.objects.filter( - Q(**{student_professors_mgr.source_field_name: annika_pre_clone.id}) | - Q(**{student_professors_mgr.source_field_name: annika_post_clone.id}))) + # - 4 of them are pointing the previous annika-object (including the + # non-current link to Mr. Biggs) + # - 3 of them are pointing the current annika-object (only current + # links were taken over) + student_professor_links = list( + student_professors_mgr.through.objects.filter( + Q(**{student_professors_mgr.source_field_name: + annika_pre_clone.id}) | + Q(**{student_professors_mgr.source_field_name: + annika_post_clone.id}))) self.assertEqual(7, len(student_professor_links)) self.assertEqual(4, student_professors_mgr.through.objects.filter( - Q(**{student_professors_mgr.source_field_name: annika_pre_clone.id})).count()) + Q(**{student_professors_mgr.source_field_name: + annika_pre_clone.id})).count()) self.assertEqual(3, student_professors_mgr.through.objects.filter( - Q(**{student_professors_mgr.source_field_name: annika_post_clone.id})).count()) + Q(**{student_professors_mgr.source_field_name: + annika_post_clone.id})).count()) # There are 6 links to 3 professors # - 3 of them are pointing the previous annika-object # - 3 of them are pointing the current annika-object - student_classroom_links = list(student_classrooms_mgr.through.objects.filter( - Q(**{student_classrooms_mgr.source_field_name: annika_pre_clone.id}) | - Q(**{student_classrooms_mgr.source_field_name: annika_post_clone.id}))) + student_classroom_links = list( + student_classrooms_mgr.through.objects.filter( + Q(**{student_classrooms_mgr.source_field_name: + annika_pre_clone.id}) | + Q(**{student_classrooms_mgr.source_field_name: + annika_post_clone.id}))) self.assertEqual(6, len(student_classroom_links)) self.assertEqual(3, student_classrooms_mgr.through.objects.filter( - Q(**{student_classrooms_mgr.source_field_name: annika_pre_clone.id})).count()) + Q(**{student_classrooms_mgr.source_field_name: + annika_pre_clone.id})).count()) self.assertEqual(3, student_classrooms_mgr.through.objects.filter( - Q(**{student_classrooms_mgr.source_field_name: annika_post_clone.id})).count()) + Q(**{student_classrooms_mgr.source_field_name: + annika_post_clone.id})).count()) class MultiM2MToSameTest(TestCase): """ - This test case shall test the correct functionality of the following relationship: + This test case shall test the correct functionality of the following + relationship: Teacher <--> Pupil <--> Teacher """ @@ -1483,9 +1643,11 @@ def setUp(self): erika = Pupil.objects.create(name='Erika', phone_number='456') ms_sue = Teacher.objects.create(name='Ms. Sue', domain='English') - ms_klishina = Teacher.objects.create(name='Ms. Klishina', domain='Russian') + ms_klishina = Teacher.objects.create(name='Ms. Klishina', + domain='Russian') - mr_kazmirek = Teacher.objects.create(name='Mr. Kazmirek', domain='Math') + mr_kazmirek = Teacher.objects.create(name='Mr. Kazmirek', + domain='Math') ms_mayer = Teacher.objects.create(name='Ms. Mayer', domain='Chemistry') self.t0 = get_utc_now() @@ -1538,12 +1700,14 @@ def test_t1(self): def test_t2(self): billy_t2 = Pupil.objects.as_of(self.t2).get(name='Billy') self.assertEqual(billy_t2.language_teachers.count(), 1) - self.assertEqual(billy_t2.language_teachers.first().name, 'Ms. Klishina') + self.assertEqual(billy_t2.language_teachers.first().name, + 'Ms. 
Klishina') def test_t3(self): erika_t3 = Pupil.objects.as_of(self.t3).get(name='Erika') self.assertEqual(erika_t3.science_teachers.count(), 1) - self.assertEqual(erika_t3.science_teachers.first().name, 'Mr. Kazmirek') + self.assertEqual(erika_t3.science_teachers.first().name, + 'Mr. Kazmirek') class SelfReferencingManyToManyTest(TestCase): @@ -1566,7 +1730,9 @@ def test_child_relationship(self): def test_relationship_spanning_query(self): mips_parents_qs = Person.objects.current.filter(children__name='Mips') - self.assertSetEqual({'Max', 'Maude'}, {p.name for p in mips_parents_qs}) + self.assertSetEqual({'Max', 'Maude'}, + {p.name for p in mips_parents_qs}) + class ManyToManyFilteringTest(TestCase): def setUp(self): @@ -1626,15 +1792,16 @@ def test_filtering_one_jump(self): def test_inexistent_relations_at_t0(self): """ - Test return value when there is no element assigned to a M2M relationship + Test return value when there is no element assigned to a M2M + relationship """ c1_at_t0 = C1.objects.as_of(self.t0).get() self.assertEqual([], list(c1_at_t0.c2s.all())) def test_filtering_one_jump_with_version_at_t1(self): """ - Test filtering m2m relations with 2 models with propagation of querytime - information across all tables + Test filtering m2m relations with 2 models with propagation of + querytime information across all tables """ should_be_c1 = C1.objects.as_of(self.t1) \ .filter(c2s__name__startswith='c2').first() @@ -1655,11 +1822,12 @@ def test_filtering_one_jump_with_version_at_t3(self): .filter(c3s__name__startswith='c3a').first() self.assertIsNone(should_be_none) - @skipUnless(connection.vendor == 'sqlite', 'SQL is database specific, only sqlite is tested here.') + @skipUnless(connection.vendor == 'sqlite', + 'SQL is database specific, only sqlite is tested here.') def test_query_created_by_filtering_one_jump_with_version_at_t1(self): """ - Test filtering m2m relations with 2 models with propagation of querytime - information across all tables + Test filtering m2m relations with 2 models with propagation of + querytime information across all tables """ should_be_c1_queryset = C1.objects.as_of(self.t1) \ .filter(c2s__name__startswith='c2') @@ -1667,10 +1835,12 @@ def test_query_created_by_filtering_one_jump_with_version_at_t1(self): t1_string = self.t1.isoformat().replace('T', ' ') t1_no_tz_string = t1_string[:-6] expected_query = """ - SELECT "versions_tests_c1"."id", "versions_tests_c1"."identity", + SELECT "versions_tests_c1"."id", + "versions_tests_c1"."identity", "versions_tests_c1"."version_start_date", "versions_tests_c1"."version_end_date", - "versions_tests_c1"."version_birth_date", "versions_tests_c1"."name" + "versions_tests_c1"."version_birth_date", + "versions_tests_c1"."name" FROM "versions_tests_c1" INNER JOIN "versions_tests_c1_c2s" ON ( "versions_tests_c1"."id" = "versions_tests_c1_c2s"."c1_id" @@ -1698,21 +1868,22 @@ def test_query_created_by_filtering_one_jump_with_version_at_t1(self): ) """.format(time=t1_string, time_no_tz=t1_no_tz_string) - self.assertStringEqualIgnoreWhiteSpaces(expected_query, should_be_c1_query) + self.assertStringEqualIgnoreWhiteSpaces(expected_query, + should_be_c1_query) def test_filtering_one_jump_reverse(self): """ - Test filtering m2m relations with 2 models but navigating relation in the - reverse direction + Test filtering m2m relations with 2 models but navigating relation in + the reverse direction """ should_be_c3 = C3.objects.filter(c2s__name__startswith='c2').first() self.assertIsNotNone(should_be_c3) def 
test_filtering_one_jump_reverse_with_version_at_t1(self): """ - Test filtering m2m relations with 2 models with propagation of querytime - information across all tables and navigating the relation in the reverse - direction + Test filtering m2m relations with 2 models with propagation of + querytime information across all tables and navigating the relation + in the reverse direction """ should_be_c3 = C3.objects.as_of(self.t1) \ .filter(c2s__name__startswith='c2').first() @@ -1724,13 +1895,14 @@ def test_filtering_two_jumps(self): Test filtering m2m relations with 3 models """ with self.assertNumQueries(1) as counter: - should_be_c1 = C1.objects.filter(c2s__c3s__name__startswith='c3').first() + should_be_c1 = C1.objects.filter( + c2s__c3s__name__startswith='c3').first() self.assertIsNotNone(should_be_c1) def test_filtering_two_jumps_with_version_at_t1(self): """ - Test filtering m2m relations with 3 models with propagation of querytime - information across all tables + Test filtering m2m relations with 3 models with propagation of + querytime information across all tables """ with self.assertNumQueries(3) as counter: should_be_none = C1.objects.as_of(self.t1) \ @@ -1746,7 +1918,8 @@ def test_filtering_two_jumps_with_version_at_t1(self): .filter(c2s__c3s__name__startswith='c3').all().count() self.assertEqual(1, count) - @skipUnless(connection.vendor == 'sqlite', 'SQL is database specific, only sqlite is tested here.') + @skipUnless(connection.vendor == 'sqlite', + 'SQL is database specific, only sqlite is tested here.') def test_query_created_by_filtering_two_jumps_with_version_at_t1(self): """ Investigate correctness of the resulting SQL query @@ -1757,9 +1930,12 @@ def test_query_created_by_filtering_two_jumps_with_version_at_t1(self): t1_string = self.t1.isoformat().replace('T', ' ') t1_no_tz_string = t1_string[:-6] expected_query = """ - SELECT "versions_tests_c1"."id", "versions_tests_c1"."identity", - "versions_tests_c1"."version_start_date", "versions_tests_c1"."version_end_date", - "versions_tests_c1"."version_birth_date", "versions_tests_c1"."name" + SELECT "versions_tests_c1"."id", + "versions_tests_c1"."identity", + "versions_tests_c1"."version_start_date", + "versions_tests_c1"."version_end_date", + "versions_tests_c1"."version_birth_date", + "versions_tests_c1"."name" FROM "versions_tests_c1" INNER JOIN "versions_tests_c1_c2s" ON ( "versions_tests_c1"."id" = "versions_tests_c1_c2s"."c1_id" @@ -1796,12 +1972,14 @@ def test_query_created_by_filtering_two_jumps_with_version_at_t1(self): AND "versions_tests_c1"."version_start_date" <= {time_no_tz} ) """.format(time=t1_string, time_no_tz=t1_no_tz_string) - self.assertStringEqualIgnoreWhiteSpaces(expected_query, should_be_c1_query) + self.assertStringEqualIgnoreWhiteSpaces(expected_query, + should_be_c1_query) def test_filtering_two_jumps_with_version_at_t2(self): """ - Test filtering m2m relations with 3 models with propagation of querytime - information across all tables but this time at point in time t2 + Test filtering m2m relations with 3 models with propagation of + querytime information across all tables but this time at point in time + t2 """ with self.assertNumQueries(2) as counter: should_be_c1 = C1.objects.as_of(self.t2) \ @@ -1814,8 +1992,9 @@ def test_filtering_two_jumps_with_version_at_t2(self): def test_filtering_two_jumps_with_version_at_t3(self): """ - Test filtering m2m relations with 3 models with propagation of querytime - information across all tables but this time at point in time t3 + Test filtering m2m 
relations with 3 models with propagation of + querytime information across all tables but this time at point in time + t3 """ with self.assertNumQueries(3) as counter: # Should be None, since object 'c3a' does not exist anymore at t3 @@ -1833,18 +2012,19 @@ def test_filtering_two_jumps_with_version_at_t3(self): def test_filtering_two_jumps_reverse(self): """ - Test filtering m2m relations with 3 models but navigating relation in the - reverse direction + Test filtering m2m relations with 3 models but navigating relation in + the reverse direction """ with self.assertNumQueries(1) as counter: - should_be_c3 = C3.objects.filter(c2s__c1s__name__startswith='c1').first() + should_be_c3 = C3.objects.filter( + c2s__c1s__name__startswith='c1').first() self.assertIsNotNone(should_be_c3) def test_filtering_two_jumps_reverse_with_version_at_t1(self): """ - Test filtering m2m relations with 3 models with propagation of querytime - information across all tables and navigating the relation in the reverse - direction + Test filtering m2m relations with 3 models with propagation of + querytime information across all tables and navigating the relation in + the reverse direction """ with self.assertNumQueries(2) as counter: should_be_c3 = C3.objects.as_of(self.t1). \ @@ -1858,9 +2038,9 @@ def test_filtering_two_jumps_reverse_with_version_at_t1(self): def test_filtering_two_jumps_reverse_with_version_at_t2(self): """ - Test filtering m2m relations with 3 models with propagation of querytime - information across all tables and navigating the relation in the reverse - direction but this time at point in time t2 + Test filtering m2m relations with 3 models with propagation of + querytime information across all tables and navigating the relation in + the reverse direction but this time at point in time t2 """ with self.assertNumQueries(2) as counter: should_be_c3 = C3.objects.as_of(self.t2) \ @@ -1924,15 +2104,18 @@ def setUp(self): self.t3 = get_utc_now() def test_t1_relations(self): - observer = Observer.objects.as_of(self.t1).filter(identity=self.o1.identity).first() + observer = Observer.objects.as_of(self.t1).filter( + identity=self.o1.identity).first() self.assertEqual(0, observer.subjects.all().count()) def test_t2_relations(self): - observer = Observer.objects.as_of(self.t2).filter(identity=self.o1.identity).first() + observer = Observer.objects.as_of(self.t2).filter( + identity=self.o1.identity).first() self.assertEqual(2, observer.subjects.all().count()) def test_t3_relations(self): - observer = Observer.objects.as_of(self.t3).filter(identity=self.o1.identity).first() + observer = Observer.objects.as_of(self.t3).filter( + identity=self.o1.identity).first() self.assertEqual(0, observer.subjects.all().count()) @@ -1960,32 +2143,39 @@ def setUp(self): self.c1 = self.c1.clone() self.c1.team_set = [] - self.team10 = Team.objects.current.get(identity=self.team10.identity).clone() + self.team10 = Team.objects.current.get( + identity=self.team10.identity).clone() self.c10.team_set.clear() self.t3 = get_utc_now() def test_t1_relations_for_cloned_referenced_object(self): - city = City.objects.as_of(self.t1).filter(identity=self.c1.identity).first() + city = City.objects.as_of(self.t1).filter( + identity=self.c1.identity).first() self.assertEqual(0, city.team_set.all().count()) def test_t2_relations_for_cloned_referenced_object(self): - city = City.objects.as_of(self.t2).filter(identity=self.c1.identity).first() + city = City.objects.as_of(self.t2).filter( + identity=self.c1.identity).first() self.assertEqual(2, 
city.team_set.all().count()) def test_t3_relations_for_cloned_referenced_object(self): - city = City.objects.as_of(self.t3).filter(identity=self.c1.identity).first() + city = City.objects.as_of(self.t3).filter( + identity=self.c1.identity).first() self.assertEqual(0, city.team_set.all().count()) def test_t1_relations_for_cloned_referring_object(self): - city = City.objects.as_of(self.t1).filter(identity=self.c10.identity).first() + city = City.objects.as_of(self.t1).filter( + identity=self.c10.identity).first() self.assertEqual(0, city.team_set.all().count()) def test_t2_relations_for_cloned_referring_object(self): - city = City.objects.as_of(self.t2).filter(identity=self.c10.identity).first() + city = City.objects.as_of(self.t2).filter( + identity=self.c10.identity).first() self.assertEqual(2, city.team_set.all().count()) def test_t3_relations_for_cloned_referring_object(self): - city = City.objects.as_of(self.t3).filter(identity=self.c10.identity).first() + city = City.objects.as_of(self.t3).filter( + identity=self.c10.identity).first() self.assertEqual(0, city.team_set.all().count()) @@ -2000,7 +2190,8 @@ def setUp(self): def test_select_related(self): with self.assertNumQueries(1): - player = Player.objects.as_of(self.t1).select_related('team').get(name='pl1.v1') + player = Player.objects.as_of(self.t1).select_related('team').get( + name='pl1.v1') self.assertIsNotNone(player) self.assertEqual(player.team, self.team1) @@ -2010,21 +2201,27 @@ def test_select_related(self): p1.save() t2 = get_utc_now() with self.assertNumQueries(1): - player = Player.objects.current.select_related('team').get(name='pl1.v2') + player = Player.objects.current.select_related('team').get( + name='pl1.v2') self.assertIsNotNone(player) self.assertIsNone(player.team) - # Multiple foreign-key related tables should still only require one query + # Multiple foreign-key related tables should still only require one + # query with self.assertNumQueries(1): - player = Player.objects.as_of(t2).select_related('team__city').get(name='pl2.v1') + player = Player.objects.as_of(t2).select_related('team__city').get( + name='pl2.v1') self.assertIsNotNone(player) self.assertEqual(self.city1, player.team.city) - @skipUnless(connection.vendor == 'sqlite', 'SQL is database specific, only sqlite is tested here.') + @skipUnless(connection.vendor == 'sqlite', + 'SQL is database specific, only sqlite is tested here.') def test_select_related_query_sqlite(self): - select_related_queryset = Player.objects.as_of(self.t1).select_related('team').all() + select_related_queryset = Player.objects.as_of(self.t1).select_related( + 'team').all() # Validating the query before verifying the SQL string - self.assertEqual(['pl1.v1', 'pl2.v1'], [player.name for player in select_related_queryset]) + self.assertEqual(['pl1.v1', 'pl2.v1'], + [player.name for player in select_related_queryset]) select_related_query = str(select_related_queryset.query) team_table = Team._meta.db_table @@ -2047,22 +2244,27 @@ def test_select_related_query_sqlite(self): "{team_table}"."name", "{team_table}"."city_id" FROM "{player_table}" - LEFT OUTER JOIN "{team_table}" ON ("{player_table}"."team_id" = "{team_table}"."identity" - AND (({team_table}.version_start_date <= {ts} - AND ({team_table}.version_end_date > {ts} - OR {team_table}.version_end_date IS NULL)))) + LEFT OUTER JOIN "{team_table}" ON ( + "{player_table}"."team_id" = "{team_table}"."identity" + AND (({team_table}.version_start_date <= {ts} + AND ({team_table}.version_end_date > {ts} + OR 
{team_table}.version_end_date IS NULL)))) WHERE ( ("{player_table}"."version_end_date" > {ts_wo_tz} OR "{player_table}"."version_end_date" IS NULL) AND "{player_table}"."version_start_date" <= {ts_wo_tz} ) - """.format(player_table=player_table, team_table=team_table, ts=t1_utc_w_tz, ts_wo_tz=t1_utc_wo_tz) - self.assertStringEqualIgnoreWhiteSpaces(expected_query, select_related_query) + """.format(player_table=player_table, team_table=team_table, + ts=t1_utc_w_tz, ts_wo_tz=t1_utc_wo_tz) + self.assertStringEqualIgnoreWhiteSpaces(expected_query, + select_related_query) - @skipUnless(connection.vendor == 'postgresql', 'SQL is database specific, only PostgreSQL is tested here.') + @skipUnless(connection.vendor == 'postgresql', + 'SQL is database specific, only PostgreSQL is tested here.') def test_select_related_query_postgresql(self): - select_related_query = str(Player.objects.as_of(self.t1).select_related('team').all().query) + select_related_query = str( + Player.objects.as_of(self.t1).select_related('team').all().query) team_table = Team._meta.db_table player_table = Player._meta.db_table @@ -2084,22 +2286,26 @@ def test_select_related_query_postgresql(self): "{team_table}"."name", "{team_table}"."city_id" FROM "{player_table}" - LEFT OUTER JOIN "{team_table}" ON ("{player_table}"."team_id" = "{team_table}"."identity" - AND (({team_table}.version_start_date <= {ts} - AND ({team_table}.version_end_date > {ts} - OR {team_table}.version_end_date IS NULL)))) + LEFT OUTER JOIN "{team_table}" ON ( + "{player_table}"."team_id" = "{team_table}"."identity" + AND (({team_table}.version_start_date <= {ts} + AND ({team_table}.version_end_date > {ts} + OR {team_table}.version_end_date IS NULL)))) WHERE ( ("{player_table}"."version_end_date" > {ts} OR "{player_table}"."version_end_date" IS NULL) AND "{player_table}"."version_start_date" <= {ts} ) - """.format(player_table=player_table, team_table=team_table, ts=t1_utc_w_tz, ts_wo_tz=t1_utc_wo_tz) - self.assertStringEqualIgnoreWhiteSpaces(expected_query, select_related_query) + """.format(player_table=player_table, team_table=team_table, + ts=t1_utc_w_tz, ts_wo_tz=t1_utc_wo_tz) + self.assertStringEqualIgnoreWhiteSpaces(expected_query, + select_related_query) def test_prefetch_related_via_foreignkey(self): with self.assertNumQueries(3): - team = Team.objects.as_of(self.t1).prefetch_related('player_set', 'city').first() + team = Team.objects.as_of(self.t1).prefetch_related('player_set', + 'city').first() self.assertIsNotNone(team) with self.assertNumQueries(0): @@ -2114,7 +2320,8 @@ def test_prefetch_related_via_foreignkey(self): p1.delete() with self.assertNumQueries(3): - team = Team.objects.current.prefetch_related('player_set', 'city').first() + team = Team.objects.current.prefetch_related('player_set', + 'city').first() self.assertIsNotNone(team) with self.assertNumQueries(0): @@ -2134,7 +2341,8 @@ def test_prefetch_related_via_foreignkey(self): def test_prefetch_related_via_many_to_many(self): # award1 - award10 - awards = [Award.objects.create(name='award' + str(i)) for i in range(1, 11)] + awards = [Award.objects.create(name='award' + str(i)) for i in + range(1, 11)] # city0 - city2 cities = [City.objects.create(name='city-' + str(i)) for i in range(3)] teams = [] @@ -2162,11 +2370,13 @@ def test_prefetch_related_via_many_to_many(self): # the -5s have awards: 5,6 with self.assertNumQueries(6): players_t2 = list( - Player.objects.as_of(t2).prefetch_related('team', 'awards').filter( + Player.objects.as_of(t2).prefetch_related('team', + 
'awards').filter( name__startswith='player-').order_by('name') ) players_current = list( - Player.objects.current.prefetch_related('team', 'awards').filter( + Player.objects.current.prefetch_related('team', + 'awards').filter( name__startswith='player-').order_by('name') ) @@ -2180,7 +2390,8 @@ def test_prefetch_related_via_many_to_many(self): self.assertEqual(t2_p.team.name, current_p.team.name) if i % 2: self.assertGreater(len(t2_p.awards.all()), 0) - self.assertSetEqual(set(t2_p.awards.all()), set(current_p.awards.all())) + self.assertSetEqual(set(t2_p.awards.all()), + set(current_p.awards.all())) award_players.append(current_p) name_list = [] @@ -2189,10 +2400,10 @@ def test_prefetch_related_via_many_to_many(self): name_list.append(p.name) with self.assertNumQueries(2): - old_award_players = list( - Player.objects.as_of(t2).prefetch_related('awards').filter( - name__in=name_list).order_by('name') - ) + old_award_players = list( + Player.objects.as_of(t2).prefetch_related('awards').filter( + name__in=name_list).order_by('name') + ) with self.assertNumQueries(2): updated_award_players = list( @@ -2218,7 +2429,8 @@ def setUp(self): sleep(0.001) def modify_objects(self): - # Clone the city (which is referenced by a foreign key in the team object). + # Clone the city (which is referenced by a foreign key in the team + # object). self.c1a = self.c1.clone() self.c1a.name = 'city.v2' self.c1a.save() @@ -2233,7 +2445,8 @@ def test_reverse_fk_prefetch_queryset_with_historic_versions(self): """ prefetch_related with Prefetch objects that specify querysets. """ - historic_cities_qs = City.objects.as_of(self.time1).filter(name='city.v1').prefetch_related( + historic_cities_qs = City.objects.as_of(self.time1).filter( + name='city.v1').prefetch_related( Prefetch( 'team_set', queryset=Team.objects.as_of(self.time1), @@ -2250,12 +2463,17 @@ def test_reverse_fk_prefetch_queryset_with_historic_versions(self): self.assertEquals(1, len(historic_cities)) historic_city = historic_cities[0] self.assertEquals(2, len(historic_city.prefetched_teams)) - self.assertSetEqual({'team1.v1', 'team2.v1'}, {t.name for t in historic_city.prefetched_teams}) - team = [t for t in historic_city.prefetched_teams if t.name == 'team1.v1'][0] - self.assertSetEqual({'pl1.v1', 'pl2.v1'}, {p.name for p in team.prefetched_players}) + self.assertSetEqual( + {'team1.v1', 'team2.v1'}, + {t.name for t in historic_city.prefetched_teams}) + team = [t for t in historic_city.prefetched_teams if + t.name == 'team1.v1'][0] + self.assertSetEqual({'pl1.v1', 'pl2.v1'}, + {p.name for p in team.prefetched_players}) # For the 'current' case: - current_cities_qs = City.objects.current.filter(name='city.v1').prefetch_related( + current_cities_qs = City.objects.current.filter( + name='city.v1').prefetch_related( Prefetch( 'team_set', queryset=Team.objects.current, @@ -2272,13 +2490,18 @@ def test_reverse_fk_prefetch_queryset_with_historic_versions(self): self.assertEquals(1, len(current_cities)) current_city = current_cities[0] self.assertEquals(2, len(current_city.prefetched_teams)) - self.assertSetEqual({'team1.v1', 'team2.v1'}, {t.name for t in current_city.prefetched_teams}) - team = [t for t in current_city.prefetched_teams if t.name == 'team1.v1'][0] - self.assertSetEqual({'pl1.v1', 'pl2.v1'}, {p.name for p in team.prefetched_players}) + self.assertSetEqual( + {'team1.v1', 'team2.v1'}, + {t.name for t in current_city.prefetched_teams}) + team = [t for t in current_city.prefetched_teams if + t.name == 'team1.v1'][0] + 
self.assertSetEqual({'pl1.v1', 'pl2.v1'}, + {p.name for p in team.prefetched_players}) self.modify_objects() - historic_cities_qs = City.objects.as_of(self.time1).filter(name='city.v1').prefetch_related( + historic_cities_qs = City.objects.as_of(self.time1).filter( + name='city.v1').prefetch_related( Prefetch( 'team_set', queryset=Team.objects.as_of(self.time1), @@ -2295,12 +2518,17 @@ def test_reverse_fk_prefetch_queryset_with_historic_versions(self): self.assertEquals(1, len(historic_cities)) historic_city = historic_cities[0] self.assertEquals(2, len(historic_city.prefetched_teams)) - self.assertSetEqual({'team1.v1', 'team2.v1'}, {t.name for t in historic_city.prefetched_teams}) - team = [t for t in historic_city.prefetched_teams if t.name == 'team1.v1'][0] - self.assertSetEqual({'pl1.v1', 'pl2.v1'}, {p.name for p in team.prefetched_players}) + self.assertSetEqual( + {'team1.v1', 'team2.v1'}, + {t.name for t in historic_city.prefetched_teams}) + team = [t for t in historic_city.prefetched_teams if + t.name == 'team1.v1'][0] + self.assertSetEqual({'pl1.v1', 'pl2.v1'}, + {p.name for p in team.prefetched_players}) # For the 'current' case: - current_cities_qs = City.objects.current.filter(name='city.v2').prefetch_related( + current_cities_qs = City.objects.current.filter( + name='city.v2').prefetch_related( Prefetch( 'team_set', queryset=Team.objects.current, @@ -2317,23 +2545,28 @@ def test_reverse_fk_prefetch_queryset_with_historic_versions(self): self.assertEquals(1, len(current_cities)) current_city = current_cities[0] self.assertEquals(2, len(current_city.prefetched_teams)) - self.assertSetEqual({'team1.v2', 'team2.v1'}, {t.name for t in current_city.prefetched_teams}) - team = [t for t in current_city.prefetched_teams if t.name == 'team1.v2'][0] - self.assertSetEqual({'pl1.v2', 'pl2.v1'}, {p.name for p in team.prefetched_players}) + self.assertSetEqual( + {'team1.v2', 'team2.v1'}, + {t.name for t in current_city.prefetched_teams}) + team = [t for t in current_city.prefetched_teams if + t.name == 'team1.v2'][0] + self.assertSetEqual({'pl1.v2', 'pl2.v1'}, + {p.name for p in team.prefetched_players}) - # When a different time is specified for the prefetch queryset than for the base queryset: + # When a different time is specified for the prefetch queryset + # than for the base queryset: with self.assertRaises(ValueError): _ = City.objects.current.filter(name='city.v2').prefetch_related( Prefetch( 'team_set', - queryset = Team.objects.as_of(self.time1), - to_attr = 'prefetched_teams' + queryset=Team.objects.as_of(self.time1), + to_attr='prefetched_teams' ), Prefetch( 'prefetched_teams__player_set', - queryset = Player.objects.as_of(self.time1), - to_attr = 'prefetched_players' + queryset=Player.objects.as_of(self.time1), + to_attr='prefetched_players' ), )[0] @@ -2341,59 +2574,83 @@ def test_reverse_fk_simple_prefetch_with_historic_versions(self): """ prefetch_related with simple lookup. 
""" - historic_cities_qs = City.objects.as_of(self.time1).filter(name='city.v1').prefetch_related( + historic_cities_qs = City.objects.as_of(self.time1).filter( + name='city.v1').prefetch_related( 'team_set', 'team_set__player_set') with self.assertNumQueries(3): historic_cities = list(historic_cities_qs) self.assertEquals(1, len(historic_cities)) historic_city = historic_cities[0] self.assertEquals(2, len(historic_city.team_set.all())) - self.assertSetEqual({'team1.v1', 'team2.v1'}, {t.name for t in historic_city.team_set.all()}) - team = [t for t in historic_city.team_set.all() if t.name == 'team1.v1'][0] - self.assertSetEqual({'pl1.v1', 'pl2.v1'}, {p.name for p in team.player_set.all()}) + self.assertSetEqual({'team1.v1', 'team2.v1'}, + {t.name for t in historic_city.team_set.all()}) + team = \ + [t for t in historic_city.team_set.all() if + t.name == 'team1.v1'][ + 0] + self.assertSetEqual({'pl1.v1', 'pl2.v1'}, + {p.name for p in team.player_set.all()}) # For the 'current' case: - current_cities_qs = City.objects.current.filter(name='city.v1').prefetch_related( + current_cities_qs = City.objects.current.filter( + name='city.v1').prefetch_related( 'team_set', 'team_set__player_set') with self.assertNumQueries(3): current_cities = list(current_cities_qs) self.assertEquals(1, len(current_cities)) current_city = current_cities[0] self.assertEquals(2, len(current_city.team_set.all())) - self.assertSetEqual({'team1.v1', 'team2.v1'}, {t.name for t in current_city.team_set.all()}) - team = [t for t in current_city.team_set.all() if t.name == 'team1.v1'][0] - self.assertSetEqual({'pl1.v1', 'pl2.v1'}, {p.name for p in team.player_set.all()}) - - # Now, we'll clone the city (which is referenced by a foreign key in the team object). + self.assertSetEqual({'team1.v1', 'team2.v1'}, + {t.name for t in current_city.team_set.all()}) + team = \ + [t for t in current_city.team_set.all() if + t.name == 'team1.v1'][0] + self.assertSetEqual({'pl1.v1', 'pl2.v1'}, + {p.name for p in team.player_set.all()}) + + # Now, we'll clone the city (which is referenced by a foreign key in + # the team object). # The queries above, when repeated, should work the same as before. 
self.modify_objects() - historic_cities_qs = City.objects.as_of(self.time1).filter(name='city.v1').prefetch_related( + historic_cities_qs = City.objects.as_of(self.time1).filter( + name='city.v1').prefetch_related( 'team_set', 'team_set__player_set') with self.assertNumQueries(3): historic_cities = list(historic_cities_qs) self.assertEquals(1, len(historic_cities)) historic_city = historic_cities[0] self.assertEquals(2, len(historic_city.team_set.all())) - self.assertSetEqual({'team1.v1', 'team2.v1'}, {t.name for t in historic_city.team_set.all()}) - team = [t for t in historic_city.team_set.all() if t.name == 'team1.v1'][0] - self.assertSetEqual({'pl1.v1', 'pl2.v1'}, {p.name for p in team.player_set.all()}) + self.assertSetEqual({'team1.v1', 'team2.v1'}, + {t.name for t in historic_city.team_set.all()}) + team = \ + [t for t in historic_city.team_set.all() if + t.name == 'team1.v1'][ + 0] + self.assertSetEqual({'pl1.v1', 'pl2.v1'}, + {p.name for p in team.player_set.all()}) # For the 'current' case: - current_cities_qs = City.objects.current.filter(name='city.v2').prefetch_related( + current_cities_qs = City.objects.current.filter( + name='city.v2').prefetch_related( 'team_set', 'team_set__player_set') with self.assertNumQueries(3): current_cities = list(current_cities_qs) self.assertEquals(1, len(current_cities)) current_city = current_cities[0] self.assertEquals(2, len(current_city.team_set.all())) - self.assertSetEqual({'team1.v2', 'team2.v1'}, {t.name for t in current_city.team_set.all()}) - team = [t for t in current_city.team_set.all() if t.name == 'team1.v2'][0] - self.assertSetEqual({'pl1.v2', 'pl2.v1'}, {p.name for p in team.player_set.all()}) + self.assertSetEqual({'team1.v2', 'team2.v1'}, + {t.name for t in current_city.team_set.all()}) + team = \ + [t for t in current_city.team_set.all() if + t.name == 'team1.v2'][0] + self.assertSetEqual({'pl1.v2', 'pl2.v1'}, + {p.name for p in team.player_set.all()}) def test_foreign_key_prefetch_with_historic_version(self): self.modify_objects() - historic_city = City.objects.as_of(self.time1).get(identity=self.c1.identity) + historic_city = City.objects.as_of(self.time1).get( + identity=self.c1.identity) # Test with a simple prefetch. with self.assertNumQueries(2): @@ -2437,9 +2694,11 @@ def test_foreign_key_prefetch_with_historic_version(self): self.assertIsNotNone(team.city) self.assertEquals(team.city.id, historic_city.id) - # Test with a Prefetch object with a queryset with an as_of that differs from the parents. - # If permitted, it would lead to possibly incorrect results and definitely cache misses, - # which would defeat the purpose of using prefetch_related. So a ValueError should be raised. + # Test with a Prefetch object with a queryset with an as_of that + # differs from the parents. + # If permitted, it would lead to possibly incorrect results and + # definitely cache misses, which would defeat the purpose of using + # prefetch_related. So a ValueError should be raised. with self.assertRaises(ValueError): team = Team.objects.as_of(self.time1).filter( identity=self.t1.identity @@ -2448,7 +2707,8 @@ def test_foreign_key_prefetch_with_historic_version(self): queryset=City.objects.current ))[0] - # Test with a Prefetch object with a queryset with an as_of, when the parent has no as_of. + # Test with a Prefetch object with a queryset with an as_of, when the + # parent has no as_of. # This is a bit of an odd thing to do, but possible. 
with self.assertNumQueries(2): team = Team.objects.filter( @@ -2467,14 +2727,25 @@ def setUp(self): self.barolo = Wine.objects.create(name="Barolo", vintage=2010) self.port = Wine.objects.create(name="Port wine", vintage=2014) - self.jacques = WineDrinker.objects.create(name='Jacques', glass_content=self.bordeaux) - self.alfonso = WineDrinker.objects.create(name='Alfonso', glass_content=self.barolo) - self.jackie = WineDrinker.objects.create(name='Jackie', glass_content=self.port) - - self.red_sailor_hat = WineDrinkerHat.objects.create(shape='Sailor', color='red', wearer=self.jackie) - self.blue_turban_hat = WineDrinkerHat.objects.create(shape='Turban', color='blue', wearer=self.alfonso) - self.green_vagabond_hat = WineDrinkerHat.objects.create(shape='Vagabond', color='green', wearer=self.jacques) - self.pink_breton_hat = WineDrinkerHat.objects.create(shape='Breton', color='pink') + self.jacques = WineDrinker.objects.create(name='Jacques', + glass_content=self.bordeaux) + self.alfonso = WineDrinker.objects.create(name='Alfonso', + glass_content=self.barolo) + self.jackie = WineDrinker.objects.create(name='Jackie', + glass_content=self.port) + + self.red_sailor_hat = WineDrinkerHat.objects.create( + shape='Sailor', + color='red', + wearer=self.jackie) + self.blue_turban_hat = WineDrinkerHat.objects.create( + shape='Turban', + color='blue', + wearer=self.alfonso) + self.green_vagabond_hat = WineDrinkerHat.objects.create( + shape='Vagabond', color='green', wearer=self.jacques) + self.pink_breton_hat = WineDrinkerHat.objects.create(shape='Breton', + color='pink') self.t1 = get_utc_now() sleep(0.1) @@ -2508,27 +2779,36 @@ def test_accessibility_of_versions_and_non_versionables_via_plain_fk(self): # Access coming from plain Models (direct access) barolo = Wine.objects.get(name='Barolo') all_time_barolo_drinkers = barolo.drinkers.all() - self.assertEqual({'Alfonso', 'Jacques'}, {winedrinker.name for winedrinker in all_time_barolo_drinkers}) + self.assertEqual({'Alfonso', 'Jacques'}, + {winedrinker.name for winedrinker in + all_time_barolo_drinkers}) t1_barolo_drinkers = barolo.drinkers.as_of(self.t1).all() - self.assertEqual({'Alfonso'}, {winedrinker.name for winedrinker in t1_barolo_drinkers}) + self.assertEqual({'Alfonso'}, {winedrinker.name for winedrinker in + t1_barolo_drinkers}) t2_barolo_drinkers = barolo.drinkers.as_of(self.t2).all() - self.assertEqual({'Alfonso', 'Jacques'}, {winedrinker.name for winedrinker in t2_barolo_drinkers}) + self.assertEqual({'Alfonso', 'Jacques'}, + {winedrinker.name for winedrinker in + t2_barolo_drinkers}) bordeaux = Wine.objects.get(name='Bordeaux') t2_bordeaux_drinkers = bordeaux.drinkers.as_of(self.t2).all() - self.assertEqual(set([]), {winedrinker.name for winedrinker in t2_bordeaux_drinkers}) + self.assertEqual(set([]), {winedrinker.name for winedrinker in + t2_bordeaux_drinkers}) - def test_accessibility_of_versions_and_non_versionables_via_versioned_fk(self): + def test_accessibility_of_versions_and_non_versionables_via_versioned_fk( + self): jacques_current = WineDrinker.objects.current.get(name='Jacques') jacques_t1 = WineDrinker.objects.as_of(self.t1).get(name='Jacques') # Testing direct access - # We're not able to track changes in objects that are not versionables, pointing objects that are versionables - # Therefore, it seems like Jacques always had the same combination of hats (even though at t1 and t2, he had - # one single hat) - self.assertEqual({'Vagabond', 'Sailor'}, {hat.shape for hat in jacques_current.hats.all()}) + # We're not able to 
track changes in objects that are not versionables, + # pointing objects that are versionables. + # Therefore, it seems like Jacques always had the same combination of + # hats (even though at t1 and t2, he had one single hat) + self.assertEqual({'Vagabond', 'Sailor'}, + {hat.shape for hat in jacques_current.hats.all()}) self.assertEqual({hat.shape for hat in jacques_t1.hats.all()}, {hat.shape for hat in jacques_current.hats.all()}) # Fetch jackie-object; at that point, jackie still had her Sailor hat @@ -2548,17 +2828,24 @@ def test_accessibility_of_versions_and_non_versionables_via_versioned_fk(self): self.assertEqual('Jacques', should_be_jacques.name) self.assertTrue(should_be_jacques.is_current) - # For the records: navigate to a prior version of a versionable object ('Jacques') as follows - # TODO: Issue #33 on Github aims for a more direct syntax to get to another version of the same object - should_be_jacques_t1 = should_be_jacques.__class__.objects.as_of(self.t1).get(identity=should_be_jacques.identity) + # For the records: navigate to a prior version of a versionable + # object ('Jacques') as follows + # TODO: Issue #33 on Github aims for a more direct syntax to get to + # another version of the same object + should_be_jacques_t1 = should_be_jacques.__class__.objects.as_of( + self.t1).get( + identity=should_be_jacques.identity) self.assertEqual(jacques_t1, should_be_jacques_t1) def test_filter_on_fk_versioned_and_nonversioned_join(self): # Get non-versioned objects, filtering on a FK-related versioned object - jacques_hats = WineDrinkerHat.objects.filter(wearer__name='Jacques').distinct() - self.assertEqual(set(jacques_hats), set([self.green_vagabond_hat, self.red_sailor_hat])) + jacques_hats = WineDrinkerHat.objects.filter( + wearer__name='Jacques').distinct() + self.assertEqual(set(jacques_hats), + set([self.green_vagabond_hat, self.red_sailor_hat])) - # Get all versions of a Versionable by filtering on a FK-related non-versioned object + # Get all versions of a Versionable by filtering on a FK-related + # non-versioned object person_versions = WineDrinker.objects.filter(hats__shape='Vagabond') self.assertIn(self.jacques, person_versions) @@ -2576,7 +2863,6 @@ def test_filter_on_fk_relation(self): class SpecifiedUUIDTest(TestCase): - @staticmethod def uuid4(uuid_value=None): if not uuid_value: @@ -2597,12 +2883,14 @@ def test_create_with_uuid(self): def test_create_with_forced_identity(self): - # This test does some artificial manipulation of versioned objects, do not use it as an example + # This test does some artificial manipulation of versioned objects, + # do not use it as an example # for real-life usage! p = Person.objects.create(name="Abela") - # Postgresql will provide protection here, since util.postgresql.create_current_version_unique_identity_indexes + # Postgresql will provide protection here, since + # util.postgresql.create_current_version_unique_identity_indexes # has been invoked in the post migration handler. if connection.vendor == 'postgresql' and get_version() >= '1.7': with self.assertRaises(IntegrityError): @@ -2611,7 +2899,9 @@ def test_create_with_forced_identity(self): Person.objects.create(forced_identity=ident, name="Alexis") p.delete() - sleep(0.1) # The start date of p2 does not necessarily have to equal the end date of p. + # The start date of p2 does not necessarily have to equal the end date + # of p. 
+ sleep(0.1) ident = self.uuid4(p.identity) p2 = Person.objects.create(forced_identity=ident, name="Alexis") @@ -2620,12 +2910,12 @@ def test_create_with_forced_identity(self): self.assertEqual(p.identity, p2.identity) self.assertNotEqual(p2.id, p2.identity) - # Thanks to that artificial manipulation, p is now the previous version of p2: + # Thanks to that artificial manipulation, p is now the previous version + # of p2: self.assertEqual(p.name, Person.objects.previous_version(p2).name) class VersionRestoreTest(TestCase): - def setup_common(self): sf = City.objects.create(name="San Francisco") forty_niners = Team.objects.create(name='49ers', city=sf) @@ -2662,7 +2952,8 @@ def test_restore_latest_version(self): # The relationships are still present on the previous version. previous = Player.objects.previous_version(restored) self.assertEqual(deleted_at, previous.version_end_date) - self.assertSetEqual(set(previous.awards.all()), set(self.awards.values())) + self.assertSetEqual(set(previous.awards.all()), + set(self.awards.values())) self.assertEqual(self.forty_niners, previous.team) def test_restore_previous_version(self): @@ -2688,18 +2979,21 @@ def test_restore_previous_version(self): # The relationships are also present on the previous version. previous = Player.objects.previous_version(restored) - self.assertSetEqual(set(previous.awards.all()), set(self.awards.values())) + self.assertSetEqual(set(previous.awards.all()), + set(self.awards.values())) self.assertEqual(self.forty_niners, previous.team) # There should be no overlap of version periods. - self.assertEquals(previous.version_end_date, restored.version_start_date) + self.assertEquals(previous.version_end_date, + restored.version_start_date) def test_restore_with_required_foreignkey(self): team = Team.objects.create(name="Flying Pigs") mascot_v1 = Mascot.objects.create(name="Curly", team=team) mascot_v1.delete() - # Restoring without supplying a value for the required foreign key will fail. + # Restoring without supplying a value for the required foreign key + # will fail. with self.assertRaises(ForeignKeyRequiresValueError): mascot_v1.restore() @@ -2710,18 +3004,22 @@ def test_restore_with_required_foreignkey(self): with self.assertRaises(ForeignKeyRequiresValueError): mascot2_v1.restore() - self.assertEqual(2, Mascot.objects.filter(name=mascot2_v1.name).count()) - self.assertEqual(1, Mascot.objects.current.filter(name=mascot2_v1.name).count()) + self.assertEqual(2, + Mascot.objects.filter(name=mascot2_v1.name).count()) + self.assertEqual(1, Mascot.objects.current.filter( + name=mascot2_v1.name).count()) # If a value (object or pk) is supplied, the restore will succeed. team2 = Team.objects.create(name="Submarine Sandwiches") restored = mascot2_v1.restore(team=team2) - self.assertEqual(3, Mascot.objects.filter(name=mascot2_v1.name).count()) + self.assertEqual(3, + Mascot.objects.filter(name=mascot2_v1.name).count()) self.assertEqual(team2, restored.team) restored.delete() rerestored = mascot2_v1.restore(team_id=team.pk) - self.assertEqual(4, Mascot.objects.filter(name=mascot2_v1.name).count()) + self.assertEqual(4, + Mascot.objects.filter(name=mascot2_v1.name).count()) self.assertEqual(team, rerestored.team) def test_over_time(self): @@ -2751,31 +3049,37 @@ def test_over_time(self): p1 = Player.objects.get(name='p1.v2').restore(team=team2) # p1 did exist at t2, but not at t3. 
- self.assertIsNotNone(Player.objects.as_of(t2).filter(name='p1.v2').first()) - self.assertIsNone(Player.objects.as_of(t3).filter(name='p1.v2').first()) + self.assertIsNotNone( + Player.objects.as_of(t2).filter(name='p1.v2').first()) + self.assertIsNone( + Player.objects.as_of(t3).filter(name='p1.v2').first()) # p1 re-appeared later with team2, though. self.assertEqual(team2, Player.objects.current.get(name='p1.v2').team) # many-to-many relations - self.assertEqual([], list(Award.objects.as_of(t2).get(name='a1.v1').players.all())) - self.assertEqual('p2.v1', Award.objects.as_of(t3).get(name='a1.v1').players.first().name) - self.assertEqual([], list(Award.objects.current.get(name='a1.v1').players.all())) + self.assertEqual([], list( + Award.objects.as_of(t2).get(name='a1.v1').players.all())) + self.assertEqual('p2.v1', Award.objects.as_of(t3).get( + name='a1.v1').players.first().name) + self.assertEqual([], list( + Award.objects.current.get(name='a1.v1').players.all())) # Expected version counts: self.assertEqual(1, Team.objects.filter(name='team1.v1').count()) self.assertEqual(1, Team.objects.filter(name='team2.v1').count()) - self.assertEqual(3, Player.objects.filter(identity=p1.identity).count()) + self.assertEqual(3, + Player.objects.filter(identity=p1.identity).count()) self.assertEqual(1, Player.objects.filter(name='p2.v1').count()) m2m_manager = Award._meta.get_field('players').rel.through.objects self.assertEqual(1, m2m_manager.all().count()) def test_restore_two_in_memory_objects(self): # Tests issue #90 - # Restoring two in-memory objects with the same identity, which, according - # to their in-memory state, are both the current version, should not - # result in having more than one current object with the same identity - # present in the database. + # Restoring two in-memory objects with the same identity, which, + # according to their in-memory state, are both the current version, + # should not result in having more than one current object with the + # same identity present in the database. a = City(name="A") a.save() b = a.clone() @@ -2785,13 +3089,13 @@ def test_restore_two_in_memory_objects(self): a.restore() b = City.objects.get(name="B") b2 = b.restore() - current_objects = City.objects.filter(version_end_date=None, identity=b.identity) + current_objects = City.objects.filter(version_end_date=None, + identity=b.identity) self.assertEqual(1, len(current_objects)) self.assertEqual(b2.pk, current_objects[0].pk) class DetachTest(TestCase): - def test_simple_detach(self): c1 = City.objects.create(name="Atlantis").clone() c1_identity = c1.identity @@ -2807,7 +3111,8 @@ def test_simple_detach(self): def test_detach_with_relations(self): """ - ManyToMany and reverse ForeignKey relationships are not kept. ForeignKey relationships are kept. + ManyToMany and reverse ForeignKey relationships are not kept. + ForeignKey relationships are kept. 
""" t = Team.objects.create(name='Raining Rats') t_pk = t.pk @@ -2833,7 +3138,6 @@ def test_detach_with_relations(self): class DeferredFieldsTest(TestCase): - def setUp(self): self.c1 = City.objects.create(name="Porto") self.team1 = Team.objects.create(name="Tigers", city=self.c1) @@ -2842,38 +3146,47 @@ def test_simple_defer(self): limited = City.objects.current.only('name').get(pk=self.c1.pk) deferred_fields = set(Versionable.VERSIONABLE_FIELDS) deferred_fields.remove('id') - self.assertSetEqual(deferred_fields, set(limited.get_deferred_fields())) + self.assertSetEqual(deferred_fields, + set(limited.get_deferred_fields())) for field_name in deferred_fields: - self.assertNotIn(field_name, limited.__dict__ ) + self.assertNotIn(field_name, limited.__dict__) deferred_fields = ['version_start_date', 'version_end_date'] - deferred = City.objects.current.defer(*deferred_fields).get(pk=self.c1.pk) - self.assertSetEqual(set(deferred_fields), set(deferred.get_deferred_fields())) + deferred = City.objects.current.defer(*deferred_fields).get( + pk=self.c1.pk) + self.assertSetEqual(set(deferred_fields), + set(deferred.get_deferred_fields())) for field_name in deferred_fields: - self.assertNotIn(field_name, deferred.__dict__ ) + self.assertNotIn(field_name, deferred.__dict__) # Accessing deferred fields triggers queries: with self.assertNumQueries(2): - self.assertEquals(self.c1.version_start_date, deferred.version_start_date) - self.assertEquals(self.c1.version_end_date, deferred.version_end_date) + self.assertEquals(self.c1.version_start_date, + deferred.version_start_date) + self.assertEquals(self.c1.version_end_date, + deferred.version_end_date) # If already fetched, no query is made: with self.assertNumQueries(0): - self.assertEquals(self.c1.version_start_date, deferred.version_start_date) + self.assertEquals(self.c1.version_start_date, + deferred.version_start_date) def test_deferred_foreign_key_field(self): team_full = Team.objects.current.get(pk=self.team1.pk) - self.assertIn('city_id', team_full.__dict__ ) + self.assertIn('city_id', team_full.__dict__) team_light = Team.objects.current.only('name').get(pk=self.team1.pk) - self.assertNotIn('city_id', team_light.__dict__ ) + self.assertNotIn('city_id', team_light.__dict__) with self.assertNumQueries(2): - # One query to get city_id, and one query to get the related City object. + # One query to get city_id, and one query to get the related City + # object. self.assertEquals(self.c1.name, team_light.city.name) def test_reverse_foreign_key_access(self): city = City.objects.current.only('name').get(identity=self.c1.identity) with self.assertNumQueries(2): - # One query to get the identity, one query to get the related objects. - self.assertSetEqual({self.team1.pk}, {o.pk for o in city.team_set.all()}) + # One query to get the identity, one query to get the related + # objects. + self.assertSetEqual({self.team1.pk}, + {o.pk for o in city.team_set.all()}) def test_many_to_many_access(self): player1 = Player.objects.create(name='Raaaaaow', team=self.team1) @@ -2883,16 +3196,20 @@ def test_many_to_many_access(self): award2 = Award.objects.create(name='Frighteningly fast') award2.players.add(player1, player2) - player2_light = Player.objects.current.only('name').get(identity=player2.identity) + player2_light = Player.objects.current.only('name').get( + identity=player2.identity) with self.assertNumQueries(1): - # Many-to-many fields use the id field, which is always fetched, so only one query - # should be made to get the related objects. 
- self.assertSetEqual({award1.pk, award2.pk}, {o.pk for o in player2_light.awards.all()}) + # Many-to-many fields use the id field, which is always fetched, + # so only one query should be made to get the related objects. + self.assertSetEqual({award1.pk, award2.pk}, + {o.pk for o in player2_light.awards.all()}) # And from the other direction: - award2_light = Award.objects.current.only('name').get(identity=award2.identity) + award2_light = Award.objects.current.only('name').get( + identity=award2.identity) with self.assertNumQueries(1): - self.assertSetEqual({player1.pk, player2.pk}, {o.pk for o in award2_light.players.all()}) + self.assertSetEqual({player1.pk, player2.pk}, + {o.pk for o in award2_light.players.all()}) def test_clone_of_deferred_object(self): c1_v1_partial = City.objects.current.defer('name').get(pk=self.c1.pk) @@ -2906,7 +3223,8 @@ def test_restore_of_deferred_object(self): t1 = get_utc_now() sleep(0.001) c1_v2 = self.c1.clone() - c1_v1 = City.objects.as_of(t1).defer('name').get(identity=c1_v2.identity) + c1_v1 = City.objects.as_of(t1).defer('name').get( + identity=c1_v2.identity) self.assertRaisesMessage( ValueError, 'Can not restore a model instance that has deferred fields', diff --git a/versions_tests/tests/test_utils.py b/versions_tests/tests/test_utils.py index 903c019..3bad998 100644 --- a/versions_tests/tests/test_utils.py +++ b/versions_tests/tests/test_utils.py @@ -1,9 +1,11 @@ from unittest import skipUnless + +from django.db import IntegrityError from django.db import connection from django.test import TestCase, TransactionTestCase -from django.db import IntegrityError -from versions_tests.models import ChainStore, Color + from versions.util.postgresql import get_uuid_like_indexes_on_table +from versions_tests.models import ChainStore, Color @skipUnless(connection.vendor == 'postgresql', "Postgresql-specific test") @@ -14,7 +16,8 @@ def setUp(self): self.black = Color.objects.create(name='black') self.yellow = Color.objects.create(name='yellow') - # - only one store with the same name and subchain_id can exist in a single city + # - only one store with the same name and subchain_id can exist in a + # single city # - no two stores can share the same door_frame_color and door_color store = { 'subchain_id': 1, @@ -28,60 +31,64 @@ def setUp(self): self.sb1 = ChainStore.objects.create(**store) def test_version_unique(self): - - # It should not be possible to create another store with the same name, city, and subchain_id + # It should not be possible to create another store with the same name, + # city, and subchain_id with self.assertRaises(IntegrityError): sb2 = ChainStore.objects.create( - subchain_id = self.sb1.subchain_id, - city = self.sb1.city, - name = self.sb1.name, - door_frame_color = self.sb1.door_frame_color, - door_color = self.green + subchain_id=self.sb1.subchain_id, + city=self.sb1.city, + name=self.sb1.name, + door_frame_color=self.sb1.door_frame_color, + door_color=self.green ) - # It should not be possible to create another store with the same door and door_frame color + # It should not be possible to create another store with the same door + # and door_frame color with self.assertRaises(IntegrityError): sb3 = ChainStore.objects.create( - subchain_id = self.sb1.subchain_id, - city = self.sb1.city, - name = "Bearded Bob's style", - door_frame_color = self.sb1.door_frame_color, - door_color = self.sb1.door_color + subchain_id=self.sb1.subchain_id, + city=self.sb1.city, + name="Bearded Bob's style", + door_frame_color=self.sb1.door_frame_color, + 
door_color=self.sb1.door_color ) - # It should be possible to create objects as long as they follow the unique constraints, though: + # It should be possible to create objects as long as they follow the + # unique constraints, though: sb4 = ChainStore.objects.create( - subchain_id = self.sb1.subchain_id, - city = self.sb1.city, - name = "Bearded Bob's style", - door_frame_color = self.sb1.door_frame_color, - door_color = self.green + subchain_id=self.sb1.subchain_id, + city=self.sb1.city, + name="Bearded Bob's style", + door_frame_color=self.sb1.door_frame_color, + door_color=self.green ) sb5 = ChainStore.objects.create( - subchain_id = sb4.subchain_id + 1, - city = sb4.city, - name = sb4.name, - door_frame_color = sb4.door_frame_color, - door_color = self.yellow + subchain_id=sb4.subchain_id + 1, + city=sb4.city, + name=sb4.name, + door_frame_color=sb4.door_frame_color, + door_color=self.yellow ) - # If a version is soft-deleted, it should be possible to create a new object with the + # If a version is soft-deleted, it should be possible to create a new + # object with the # value of that old version sb4.delete() sb6 = ChainStore.objects.create( - subchain_id = sb4.subchain_id, - city = sb4.city, - name = sb4.name, - door_frame_color = sb4.door_frame_color, - door_color = sb4.door_color + subchain_id=sb4.subchain_id, + city=sb4.city, + name=sb4.name, + door_frame_color=sb4.door_frame_color, + door_color=sb4.door_color ) def test_identity_unique(self): c = Color.objects.create(name='sky blue') c.identity = self.green.identity - # It should not be possible to have two "current" objects with the same identity: + # It should not be possible to have two "current" objects with the + # same identity: with self.assertRaises(IntegrityError): c.save() @@ -89,11 +96,13 @@ def test_identity_unique(self): @skipUnless(connection.vendor == 'postgresql', "Postgresql-specific test") class PostgresqlUuidLikeIndexesTest(TestCase): def test_no_like_indexes_on_uuid_columns(self): - # Django creates like indexes on char columns. In Django 1.7.x and below, there is no - # support for native uuid columns, so CleanerVersion uses a CharField to store the - # uuid values. For postgresql, Django creates special indexes for char fields so that + # Django creates like indexes on char columns. In Django 1.7.x and + # below, there is no support for native uuid columns, so + # CleanerVersion uses a CharField to store the uuid values. For + # postgresql, Django creates special indexes for char fields so that # like searches (e.g. WHERE foo like '%bar') are fast. - # Those indexes are not going to be used in our case, and extra indexes will slow down - # updates and inserts. So, they should have been removed by the post_migrate handler in + # Those indexes are not going to be used in our case, and extra + # indexes will slow down updates and inserts. So, they should have + # been removed by the post_migrate handler in # versions_tests.apps.VersionsTestsConfig.ready. 
        self.assertEqual(0, len(get_uuid_like_indexes_on_table(ChainStore)))

From 3ea87a2468e1800548b62de8e519d5255e39605a Mon Sep 17 00:00:00 2001
From: Jeckelmann Manuel
Date: Mon, 11 Sep 2017 11:29:41 +0200
Subject: [PATCH 3/4] Adapted setup.py to not exclude the cleanerversion module

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index b6410a2..5d2fb95 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@
       'Andrea Marcacci',
       author_email='engineering.sophia@swisscom.com',
       license='Apache License 2.0',
-      packages=find_packages(exclude=['cleanerversion', 'cleanerversion.*']),
+      packages=find_packages(exclude=['cleanerversion.settings.*']),
       url='https://github.com/swisscom/cleanerversion',
       install_requires=['django'],
       package_data={'versions': ['static/js/*.js',

From 8a0264a3c6f17661fb1741a42ee8ef5b69eeb533 Mon Sep 17 00:00:00 2001
From: Jeckelmann Manuel
Date: Mon, 11 Sep 2017 11:40:03 +0200
Subject: [PATCH 4/4] Changed import strategy in setup.py

---
 setup.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/setup.py b/setup.py
index 5d2fb95..39c5875 100644
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,6 @@
 #!/usr/bin/env python
 from setuptools import setup, find_packages
 
-import cleanerversion
-
 """
 Documentation can be found at https://docs.python.org/2/distutils/index.html,
 but usually you only need to do the following steps to publish a new package
@@ -26,7 +24,7 @@
 Reduce the set of formats or install libs
 """
 
-version = cleanerversion.get_version()
+version = __import__('cleanerversion').get_version()
 
 setup(name='CleanerVersion',
       version=version,