diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..94e6a5b
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+
+language: python
+
+python:
+  - "2.6"
+  - "2.7"
+  - "3.3"
+
+install:
+  - pip install --upgrade pip --use-mirrors
+  - pip install coveralls --use-mirrors
+  - pip install .
+
+script:
+  - coverage run --source=fixture setup.py test
+
+after_success:
+  - coveralls
+
+notifications:
+  email: false
diff --git a/README.txt b/README.txt
index 3ffd448..0989480 100644
--- a/README.txt
+++ b/README.txt
@@ -4,10 +4,10 @@ To build docs run ::
 
     $ make -C docs html
 
-For info on running tests, see fixture/test/__init__.py
+For info on running tests, see tests/__init__.py
 
 To run tests ::
-    
-    $ python setup_test_buildout.py
+
+    $ python setup.py test
     $ ./bin/buildout
-    $ ./bin/test-fixture
\ No newline at end of file
+    $ ./bin/test-fixture
diff --git a/docs/source/conf.py b/docs/source/conf.py
index a78e434..8354dfd 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -12,7 +12,7 @@
 # serve to show the default value.
 
 import sys, os
-# for doctests of Django examples to work 
+# for doctests of Django examples to work
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'fixture', 'examples', 'django_example'))
 
 # register some docutils directives
@@ -21,6 +21,8 @@
 # for version number:
 import fixture
+import re
+
 # If your extensions are in another directory, add it here. If the directory
 # is relative to the documentation root, use os.path.abspath to make it
 # absolute, like shown here.
 
@@ -50,9 +52,16 @@
 # other places throughout the built documents.
 #
 # The short X.Y version.
-version = fixture.__version__
+
+# Get the version string. Cannot be done with import!
+with open(os.path.join('..', '..', 'fixture', 'version.py'), 'rt') as f:
+    version = re.search(
+        r'__version__\s*=\s*"(?P<version>.*)"\n',
+        f.read()
+    ).group('version')
+
 # The full version, including alpha/beta/rc tags.
-release = fixture.__version__
+release = version
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
diff --git a/docs/source/using-loadable-fixture.rst b/docs/source/using-loadable-fixture.rst
index ae0a9c1..71ef07f 100644
--- a/docs/source/using-loadable-fixture.rst
+++ b/docs/source/using-loadable-fixture.rst
@@ -158,7 +158,7 @@ Fixture is designed for applications that already have a way to store data; the
     >>> engine = create_engine('sqlite:////tmp/fixture_example.db')
     >>> metadata = MetaData()
     >>> metadata.bind = engine
-    >>> Session = scoped_session(sessionmaker(bind=metadata.bind, autoflush=True, transactional=True))
+    >>> Session = scoped_session(sessionmaker(bind=metadata.bind, autoflush=True))
     >>> session = Session()
 
 Set up the table and mapper for authors ...
diff --git a/ez_setup.py b/ez_setup.py
index 89cf056..a87181d 100644
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -13,6 +13,13 @@
 This file can also be run as a script to install or upgrade setuptools.
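+
+For example, an existing installation can be upgraded by running:
+
+    python ez_setup.py -U setuptools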
""" + +from __future__ import print_function + import sys DEFAULT_VERSION = "0.6c8" DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3] @@ -88,10 +91,10 @@ def do_download(): try: import pkg_resources except ImportError: - return do_download() + return do_download() try: pkg_resources.require("setuptools>="+version); return - except pkg_resources.VersionConflict, e: + except pkg_resources.VersionConflict as e: if was_imported: print >>sys.stderr, ( "The required version of setuptools (>=%s) is not available, and\n" @@ -154,40 +157,6 @@ def download_setuptools( return os.path.realpath(saveto) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" try: @@ -226,8 +195,8 @@ def main(argv, version=DEFAULT_VERSION): from setuptools.command.easy_install import main main(argv) else: - print "Setuptools version",version,"or greater has been installed." - print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' + print("Setuptools version {0} or greater has been installed.".format(version)) + print('(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)') def update_md5(filenames): """Update our built-in md5 registry""" @@ -251,7 +220,7 @@ def update_md5(filenames): match = re.search("\nmd5_data = {\n([^}]+)}", src) if not match: - print >>sys.stderr, "Internal error!" + print("Internal error!", file=sys.stderr) sys.exit(2) src = src[:match.start(1)] + repl + src[match.end(1):] diff --git a/fixture/__init__.py b/fixture/__init__.py index 83a08f8..8eef88c 100644 --- a/fixture/__init__.py +++ b/fixture/__init__.py @@ -1,8 +1,8 @@ """fixture is a python module for loading and referencing test data -It provides several utilities for achieving a *fixed state* when testing -Python programs. Specifically, these utilities setup / teardown databases and +It provides several utilities for achieving a *fixed state* when testing +Python programs. Specifically, these utilities setup / teardown databases and work with temporary file systems. You may want to start by reading the `End User Documentation`_. @@ -22,8 +22,6 @@ """ -__version__ = "1.5" - import logging import sys @@ -32,9 +30,9 @@ from fixture.util import * from fixture.io import * from fixture.style import * +from fixture.version import __version__ def setup_test_not_supported(): """hook for setup for the test command.""" raise NotImplementedError("use: `python setup.py nosetests` instead") setup_test_not_supported.__test__ = False - diff --git a/fixture/base.py b/fixture/base.py index af3fa4b..07c1791 100644 --- a/fixture/base.py +++ b/fixture/base.py @@ -4,7 +4,9 @@ The more useful bits are in :mod:`fixture.loadable` """ -import sys, traceback +import inspect +import sys +import traceback try: from functools import wraps except ImportError: @@ -17,21 +19,18 @@ def wrap_with_f(new_f): new_f.__module__ = f.__module__ return new_f return wrap_with_f - + from fixture.dataset import SuperSet -from compiler.consts import CO_GENERATOR def is_generator(func): - try: - return func.func_code.co_flags & CO_GENERATOR != 0 - except AttributeError: - return False + return inspect.isgeneratorfunction(func) + class FixtureData(object): """ - Loads one or more DataSet objects and provides an interface into that + Loads one or more DataSet objects and provides an interface into that data. 
- + Typically this is attached to a concrete Fixture class and constructed by ``data = fixture.data(...)`` """ def __init__(self, datasets, dataclass, loader): @@ -42,7 +41,7 @@ def __init__(self, datasets, dataclass, loader): def __enter__(self): """enter a with statement block. - + calls self.setup() """ self.setup() @@ -50,7 +49,7 @@ def __enter__(self): def __exit__(self, type, value, traceback): """exit a with statement block. - + calls self.teardown() """ self.teardown() @@ -76,43 +75,43 @@ def teardown(self): class Fixture(object): """An environment for loading data. - + An instance of this class can safely be a module-level object. It may be more useful to use a concrete LoadableFixture, such as SQLAlchemyFixture - + Keywords arguments: - + dataclass class to instantiate with datasets (defaults to SuperSet) loader class to instantiate and load data sets with. - + """ dataclass = SuperSet loader = None Data = FixtureData - + def __init__(self, dataclass=None, loader=None): if dataclass: self.dataclass = dataclass if loader: self.loader = loader - + def __iter__(self): for k in self.__dict__: yield k - + def with_data(self, *datasets, **cfg): """returns a decorator to wrap data around a method. - + All positional arguments are DataSet class objects. - - the decorated method will receive a new first argument, + + the decorated method will receive a new first argument, the Fixture.Data instance. - + Keyword arguments: - + setup optional callable to be executed before test teardown @@ -139,14 +138,14 @@ def passthru_teardown(): if teardown: teardown() else: passthru_teardown = teardown - + def setup_data(): data = self.data(*datasets) data.setup() return data def teardown_data(data): data.teardown() - + @wraps(routine) def call_routine(*a,**kw): data = setup_data() @@ -155,21 +154,21 @@ def call_routine(*a,**kw): except KeyboardInterrupt: # user wants to abort everything : raise - except Exception, exc: + except Exception as exc: # caught exception, so try to teardown but do it safely : etype, val, tb = sys.exc_info() try: teardown_data(data) except: - t_ident = ("-----[exception in teardown %s]-----" % + t_ident = ("-----[exception in teardown %s]-----" % hex(id(teardown_data))) sys.stderr.write("\n\n%s\n" % t_ident) traceback.print_exc() sys.stderr.write("%s\n\n" % t_ident) - raise exc, None, tb + raise exc else: teardown_data(data) - + @wraps(routine) def iter_routine(): for stack in routine(): @@ -188,35 +187,35 @@ def atomic_routine(*genargs,**kw): genargs = (data,) + genargs try: fn(*genargs, **kw) - except Exception, exc: + except Exception as exc: etype, val, tb = sys.exc_info() try: teardown_data(data) except: t_ident = ( - "-----[exception in teardown %s]-----" % + "-----[exception in teardown %s]-----" % hex(id(teardown_data))) sys.stderr.write("\n\n%s\n" % t_ident) traceback.print_exc() sys.stderr.write("%s\n\n" % t_ident) - raise exc, None, tb + raise exc else: teardown_data(data) - + restack = (atomic_routine, setup_data) + args yield restack - + if is_generator(routine): wrapped_routine = iter_routine else: wrapped_routine = call_routine - - decorate = with_setup( setup=passthru_setup, + + decorate = with_setup( setup=passthru_setup, teardown=passthru_teardown ) return decorate( wrapped_routine ) return decorate_with_data - + def data(self, *datasets): """returns a :class:`FixtureData` object for datasets.""" return self.Data(datasets, self.dataclass, self.loader) - \ No newline at end of file + diff --git a/fixture/command/generate/__init__.py 
b/fixture/command/generate/__init__.py
index 0e33d7d..634e5bc 100644
--- a/fixture/command/generate/__init__.py
+++ b/fixture/command/generate/__init__.py
@@ -1,14 +1,12 @@
 class code_str(str):
-    """string that reproduces without quotes.
-    
-    """
+    """string that reproduces without quotes."""
    def __repr__(self):
        return str.__repr__(self)[1:-1]
 
-import generate
-from generate import *
+from . import generate
 __doc__ = generate.__doc__
+from .generate import *
 
 # load modules so they can register themselves (better way?)
 try:
@@ -18,4 +16,4 @@ def __repr__(self):
 try:
-    import generate_sqlalchemy
+    from . import generate_sqlalchemy
 except ImportError:
-    pass
\ No newline at end of file
+    pass
diff --git a/fixture/command/generate/generate.py b/fixture/command/generate/generate.py
index 77c6c2d..04579b3 100755
--- a/fixture/command/generate/generate.py
+++ b/fixture/command/generate/generate.py
@@ -6,58 +6,76 @@
 """
 
-import sys, os, optparse, inspect, pkg_resources
+import os
+import six
+import sys
+import inspect
+import optparse
+import pprint
+import pkg_resources
 from warnings import warn
 from fixture.command.generate.template import templates, is_template
 
 handler_registry = []
 
+
 class NoData(LookupError):
     """no data was returned by a query"""
     pass
+
+
 class HandlerException(Exception):
     pass
+
+
 class UnrecognizedObject(HandlerException):
     pass
+
+
 class UnsupportedHandler(HandlerException):
     pass
+
+
 class MisconfiguredHandler(HandlerException):
     pass
-    
+
+
 def register_handler(handler):
     handler_registry.append(handler)
 
+
 def clear_handlers():
     handler_registry[:] = []
 
+
 class FixtureCache(object):
-    """cache of Fixture objects and their data sets to be generatred.
-    
+    """cache of Fixture objects and their data sets to be generated.
+
     needs to store resulting fixture object with set IDs so that
     foreign key data can accumulate without duplication.
-    
-    For example, if we have a product set that requires category foo 
-    and an offer set that requires category foo, the second one loaded 
-    needs to acknowledge that foo is already loaded and needs to obtain 
+
+    For example, if we have a product set that requires category foo
+    and an offer set that requires category foo, the second one loaded
+    needs to acknowledge that foo is already loaded and needs to obtain
     the key to that fixture too, to generate the right link.
     """
     def __init__(self):
         self.registry = {}
         self.order_of_appearence = []
-    
-    def add(self, set):
-        fxtid = set.obj_id()
+
+    def add(self, result_set):
+        fxtid = result_set.obj_id()
         self.push_fxtid(fxtid)
-        if not self.registry.has_key(fxtid):
+        if fxtid not in self.registry:
             self.registry[fxtid] = {}
-        
+
         # we want to add a new set but
         # MERGE in the data if the set exists.
         # this merge is done assuming that sets of
-        # the same id will always be identical 
+        # the same id will always be identical
         # (which should be true for db fixtures)
-        self.registry[fxtid][set.set_id()] = set
-    
+        self.registry[fxtid][result_set.set_id()] = result_set
+
     def push_fxtid(self, fxtid):
         o = self.order_of_appearence
         # keep pushing names, but keep the list unique...
@@ -66,51 +84,55 @@ def push_fxtid(self, fxtid):
             pass
         o.append(fxtid)
 
+
 class DataSetGenerator(object):
     """produces a callable object that can generate DataSet code.
     """
-    
+
     template = None
-    
+
     def __init__(self, options, template=None):
         self.handler = None
         self.options = options
         self.cache = FixtureCache()
         if template:
             self.template = template
-    
+
     def get_handler(self, object_path, obj=None, importable=True, **kw):
         """find and return a handler for object_path.
-        
+
         any additional keywords will be passed into the handler's constructor
-        
         """
         handler = None
         for h in handler_registry:
             try:
                 recognizes_obj = h.recognizes(object_path, obj=obj)
-            except UnsupportedHandler, e:
+            except UnsupportedHandler as e:
                 warn("%s is unsupported (%s)" % (h, e))
                 continue
+            except Exception as e:
+                warn("Unknown error in %s (%s)" % (h, e))
+                continue
             if recognizes_obj:
-                handler = h(object_path, self.options, 
+                handler = h(object_path, self.options,
                             obj=obj, template=self.template, **kw)
                 break
         if handler is None:
-            raise UnrecognizedObject, (
+            raise UnrecognizedObject(
                 "no handler recognizes object %s at %s (importable? %s); "
                 "tried handlers %s" %
-                (obj, object_path, (importable and "YES" or "NO"), 
+                (obj, object_path, (importable and "YES" or "NO"),
                  ", ".join([str(h) for h in handler_registry])))
        return handler
-    
+
     def resolve_object_path(self, object_path):
         """resolves an object path
-        
+
         if an object path is importable, returns (True, <object>)
         otherwise, returns (False, None)
         """
         importable = True
-        
+
         path, object_name = os.path.splitext(object_path)
         try:
             if not object_name:
@@ -118,18 +140,18 @@
             else:
                 if object_name.startswith('.'):
                     object_name = object_name[1:]
-                obj = __import__(path, globals(), locals(), [object_name]) 
+                obj = __import__(path, globals(), locals(), [object_name])
                 obj = getattr(obj, object_name)
         except (ImportError, AttributeError):
             importable = False
             obj = None
         return importable, obj
-    
+
     def code(self):
         """builds and returns code string.
         """
         tpl = {'fxt_type': self.handler.fxt_type()}
-        
+
         code = [self.template.header(self.handler)]
         o = [k for k in self.cache.order_of_appearence]
         o.reverse()
@@ -137,24 +159,24 @@
             datadef = self.template.DataDef()
             tpl['data'] = []
             tpl['fxt_class'] = self.handler.mk_class_name(kls)
-            
+
             val_dict = self.cache.registry[kls]
-            for k,fset in val_dict.items():
+            for k, fset in val_dict.items():
                 key = fset.mk_key()
                 data = self.handler.resolve_data_dict(datadef, fset)
                 tpl['data'].append((key, self.template.dict(data)))
-            
+
             tpl['meta'] = "\n ".join(datadef.meta(kls))
             tpl['data_header'] = "\n ".join(datadef.data_header) + "\n"
             tpl['data'] = self.template.data(tpl['data'])
             code.append(self.template.render(tpl))
-        
+
         code = "\n".join(self.template.import_header + code)
         return code
-    
+
     def __call__(self, object_path, setup_callbacks=None):
         """uses data obj to generate code for a fixture.
-        
+
         returns code string.
         """
         importable, obj = self.resolve_object_path(object_path)
@@ -167,18 +189,19 @@
         self.handler.begin()
         try:
             self.handler.findall(self.options.where)
-            def cache_set(s):
+
+            def cache_set(s):
                 self.cache.add(s)
-                for (k,v) in s.data_dict.items():
+                for (k, v) in s.data_dict.items():
                     if isinstance(v, FixtureSet):
                         f_set = v
                         cache_set(f_set)
-            
+
             # need to loop through all sets,
-            # then through all set items and add all sets of all 
+            # then through all set items and add all sets of all
             # foreign keys and their foreign keys.
             # got it???
-            
+
             for s in self.handler.sets():
                 cache_set(s)
         except:
@@ -186,104 +209,106 @@
             raise
         else:
             self.handler.commit()
-        
+
         return self.code()
 
+
 class FixtureSet(object):
     """a key, data_dict pair for a set in a fixture.
- + takes a data attribute which must be understood by the concrete FixtureSet """ - + def __init__(self, data): self.data = data self.data_dict = {} - + def __repr__(self): - return "<%s at %s for data %s>" % ( - self.__class__.__name__, hex(id(self)), - pprint.pformat(self.data_dict)) - + return "<%s at %s for data %s>" % (self.__class__.__name__, + hex(id(self)), + pprint.pformat(self.data_dict)) + def attr_to_db_col(self, col): """returns a database column name for a fixture set's attribute. - + this is only useful for sqlobject in how it wants camel case. """ return col - + def get_id_attr(self): """returns the name of this set's id attribute. - + i.e. "id" """ raise NotImplementedError - + def mk_key(self): """return a unique key for this fixture set. - + i.e. _ """ return "_".join(str(s) for s in ( self.mk_var_name(), self.set_id())) - + def mk_var_name(self): """returns a variable name for the instance of the fixture class. """ return self.obj_id() - + def obj_id(self): """returns a unique value that identifies the object used to generate this fixture. - + by default this is the name of the data model, i.e. Employees """ return self.model.__name__ - + def set_id(self): """returns a unique value that identifies this set within its class. - + i.e. primary key for the row """ raise NotImplementedError + class HandlerType(type): def __str__(self): # split camel class name into something readable? return self.__name__ -class DataHandler(object): + +class DataHandler(six.with_metaclass(HandlerType)): """handles an object that can provide fixture data. """ - __metaclass__ = HandlerType loadable_fxt_class = None - + def __init__(self, object_path, options, obj=None, template=None): self.obj_path = object_path self.obj = obj self.options = options self.template = template - + def begin(self): """called once when starting to build a fixture. """ self.template.begin() - + def commit(self): """called after performing any action successfully.""" pass - + def find(self, idval): """finds a record set based on key, idval.""" raise NotImplementedError - + def findall(self, query): """finds all records based on parameters.""" raise NotImplementedError - + def fxt_type(self): """returns name of the type of Fixture class for this data object.""" - + def mk_class_name(self, name_or_fset): """returns a fixture class for the fixture set. """ @@ -292,111 +317,103 @@ def mk_class_name(self, name_or_fset): else: obj_name = name_or_fset return "%s%s%s" % (self.options.prefix, obj_name, self.options.suffix) - + @staticmethod def recognizes(object_path, obj): """return True if self can handle this object_path/object. """ - raise NotImplementedError - + raise NotImplementedError + def resolve_data_dict(self, datadef, fset): """given a fixture set, resolve the linked sets in the data_dict and log any necessary headers. - + return the data_dict - """ + """ self.add_fixture_set(fset) - + # this is the dict that defines all keys/vals for - # the row. note that the only thing special we + # the row. 
note that the only thing special we # want to do is turn all foreign key values into - # code strings - - for k,v in fset.data_dict.items(): + # code strings + + for k, v in fset.data_dict.items(): if isinstance(v, FixtureSet): # then it's a foreign key link linked_fset = v self.add_fixture_set(linked_fset) - + fxt_class = self.mk_class_name(linked_fset) - datadef.add_reference( fxt_class, - fxt_var = linked_fset.mk_var_name() ) + datadef.add_reference(fxt_class, + fxt_var=linked_fset.mk_var_name()) fset.data_dict[k] = datadef.fset_to_attr(linked_fset, fxt_class) - + return fset.data_dict - + def rollback(self): """called after any action raises an exception.""" pass - + def sets(self): """yield a FixtureSet for each set in obj.""" raise NotImplementedError + def dataset_generator(argv): """%prog [options] OBJECT_PATH - - Using the object specified in the path, generate DataSet classes (code) to + + Using the object specified in the path, generate DataSet classes (code) to reproduce its data. An OBJECT_PATH can be a python path or a file path or anything else that a handler can recognize. - - When targetting Python objects the OBJECT_PATH is dot separated. + + When targetting Python objects the OBJECT_PATH is dot separated. For example, targetting the Employee class in models.py would look like: - + directory_app.models.Employee - + """ - parser = optparse.OptionParser( - usage=(inspect.getdoc(dataset_generator))) + parser = optparse.OptionParser(usage=(inspect.getdoc(dataset_generator))) parser.add_option('--dsn', - help="Sets db connection for a handler that uses a db") - parser.add_option('-w','--where', - help="SQL where clause, i.e. \"id = 1705\" ") - + help="Sets db connection for a handler that uses a db") + parser.add_option('-w', '--where', + help="SQL where clause, i.e. \"id = 1705\" ") + d = "Data" parser.add_option('--suffix', - help = ( - "String suffix for all dataset class names " - "(default: %s; i.e. an Employee object becomes EmployeeData)" % d), - default=d) + help=("String suffix for all dataset class names " + "(default: %s; i.e. an Employee object becomes EmployeeData)" % d), + default=d) parser.add_option('--prefix', - help="String prefix for all dataset class names (default: None)", - default="") - - parser.add_option('--env', - help = ( - "Module path to use as an environment for finding objects. " - "declaring multiple --env values will be recognized"), - action='append', default=[]) - - parser.add_option('--require-egg', - dest='required_eggs', - help = ( - "A requirement string to enable importing from a module that was " - "installed in multi-version mode by setuptools. I.E. foo==1.0. " - "You can repeat this option as many times as necessary."), - action='append', default=[]) - + help="String prefix for all dataset class names (default: None)", + default="") + + parser.add_option('--env', action='append', default=[], + help=("Module path to use as an environment for finding objects. " + "declaring multiple --env values will be recognized")) + + parser.add_option('--require-egg', dest='required_eggs', action='append', default=[], + help=("A requirement string to enable importing from a module that was " + "installed in multi-version mode by setuptools. I.E. foo==1.0. 
" + "You can repeat this option as many times as necessary.")) + default_tpl = templates.default() parser.add_option('--template', - help="Template to use; choices: %s, default: %s" % ( - tuple([t for t in templates]), default_tpl), - default=default_tpl) - + help="Template to use; choices: %s, default: %s" % (tuple([t for t in templates]), default_tpl), + default=default_tpl) + parser.add_option("-c", "--connect", - metavar="FUNCTION_PATH", action="append", default=[], - help=( "Path to a function that performs a custom connection, accepting a single " - "parameter, DSN. I.E. 'some_module.submod:connect' will be called as connect(DSN). " - "Called *after* OBJECT_PATH is imported but *before* any queries are made. " - "This option can be declared multiple times.")) + metavar="FUNCTION_PATH", + help=("Path to a function that performs a custom connection, accepting a single" + "parameter, DSN. I.E. 'some_module.submod:connect' will be called as connect(DSN). " + "Called *after* OBJECT_PATH is imported but *before* any queries are made.")) parser.add_option("-s", "--setup", - metavar="FUNCTION_PATH", action="append", default=[], - help=( "Path to a function that sets up data objects, accepting no parameters. " - "I.E. 'some_module.submod:setup_all' will be called as setup_all(). " - "Called *after* OBJECT_PATH is imported but *before* any queries are made " - "and *before* connect(DSN) is called. " - "This option can be declared multiple times.")) - + metavar="FUNCTION_PATH", action="append", default=[], + help=("Path to a function that sets up data objects, accepting no parameters. " + "I.E. 'some_module.submod:setup_all' will be called as setup_all(). " + "Called *after* OBJECT_PATH is imported but *before* any queries are made " + "and *before* connect(DSN) is called. 
" + "This option can be declared multiple times.")) + # parser.add_option('--show_query_only', action='store_true', # help="prints out query generated by sqlobject and exits") # parser.add_option('-c','--clause_tables', default=[], @@ -405,20 +422,20 @@ def dataset_generator(argv): # help="max results to return") # parser.add_option('-s','--order_by', # help="orderBy=ORDER_BY") - + (options, args) = parser.parse_args(argv) try: object_path = args[0] except IndexError: parser.error('incorrect arguments') - + curr_opt, curr_path, setup_callbacks = None, None, None try: curr_opt = '--connect' - for path in options.connect: - curr_path = path - connect = resolve_function_path(path) - connect(options.dsn) + if options.connect: + curr_path = options.connect + connect = resolve_function_path(options.connect) + setattr(options, 'connection', connect(options.dsn)) curr_opt = '--setup' setup_callbacks = [] for path in options.setup: @@ -427,19 +444,21 @@ def dataset_generator(argv): except ImportError: etype, val, tb = sys.exc_info() parser.error("%s=%s %s: %s" % (curr_opt, curr_path, etype.__name__, val)) - + try: - return get_object_data(object_path, options, setup_callbacks=setup_callbacks) + return get_object_data(object_path, options, + setup_callbacks=setup_callbacks) except (MisconfiguredHandler, NoData, UnrecognizedObject): etype, val, tb = sys.exc_info() parser.error("%s: %s" % (etype.__name__, val)) + def resolve_function_path(path): if ':' in path: mod, obj = path.split(':') else: mod, obj = path, None - fn = __import__(mod, globals(),globals(), [obj]) + fn = __import__(mod, globals(), globals(), [obj]) if obj is not None: parts = obj.split('.') last_attr = fn @@ -450,8 +469,9 @@ def resolve_function_path(path): fn = last_attr return fn + def get_object_data(object_path, options, setup_callbacks=None): - """query object at object_path and return generated code + """query object at object_path and return generated code representing its data """ for egg in options.required_eggs: @@ -464,6 +484,7 @@ def get_object_data(object_path, options, setup_callbacks=None): generate.template = templates.find(options.template) return generate(object_path, setup_callbacks=setup_callbacks) + def main(argv=sys.argv[1:]): if '__testmod__' in argv: # sorry this is all I can think of at the moment :( @@ -475,8 +496,8 @@ def main(argv=sys.argv[1:]): finally: teardown_examples() return - print( dataset_generator(argv)) + print(dataset_generator(argv)) return 0 if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/fixture/command/generate/generate_sqlalchemy.py b/fixture/command/generate/generate_sqlalchemy.py index 8c703f3..a317e9f 100644 --- a/fixture/command/generate/generate_sqlalchemy.py +++ b/fixture/command/generate/generate_sqlalchemy.py @@ -1,16 +1,21 @@ -import sys, inspect +import six +import sys +import inspect from fixture.command.generate import ( - DataHandler, register_handler, FixtureSet, NoData, UnsupportedHandler) + DataHandler, register_handler, FixtureSet, NoData, UnsupportedHandler, + MisconfiguredHandler) from fixture import SQLAlchemyFixture + try: import sqlalchemy except ImportError: sqlalchemy = False + class TableEnv(object): """a shared environment of sqlalchemy Table instances. 
- + can be initialized with python paths to objects or objects themselves """ def __init__(self, *objects): @@ -18,7 +23,7 @@ def __init__(self, *objects): self.tablemap = {} for obj in self.objects: module = None - if isinstance(obj, basestring): + if isinstance(obj, six.string_types): modpath = obj if modpath not in sys.modules: # i.e. modpath from command-line option... @@ -26,40 +31,38 @@ def __init__(self, *objects): if "." in modpath: cut = modpath.rfind(".") names = [modpath[cut+1:]] - parent = __import__( - modpath[0:cut], globals(), locals(), names) + parent = __import__(modpath[0:cut], globals(), + locals(), names) module = getattr(parent, names[0]) else: module = __import__(modpath) except: etype, val, tb = sys.exc_info() - raise ( - ImportError("%s: %s (while importing %s)" % ( - etype, val, modpath)), None, tb) + raise ImportError("%s: %s (while importing %s)" % ( + etype, val, modpath)) else: module = sys.modules[modpath] obj = module if module is None: module = inspect.getmodule(obj) self._find_objects(obj, module) - + def __contains__(self, key): return key in self.tablemap - + def __getitem__(self, table): try: return self.tablemap[table] except KeyError: etype, val, tb = sys.exc_info() - raise LookupError, ( - "Could not locate original declaration of Table %s " - "(looked in: %s) You might need to add " - "--env='path.to.module'?" % ( - table, ", ".join([repr(p) for p in self.objects]))), tb - + raise LookupError("Could not locate original declaration of Table %s " + "(looked in: %s) You might need to add " + "--env='path.to.module'?" % (table, ", ".join([repr(p) for p in self.objects])), + tb) + def _find_objects(self, obj, module): from sqlalchemy.schema import Table - + # get dict key/vals or dir() through object ... if not hasattr(obj, 'items'): def getitems(): @@ -68,9 +71,20 @@ def getitems(): else: getitems = obj.items for name, o in getitems(): + # if we get declarative base then we look for table object. + o = getattr(o, '__table__', o) if isinstance(o, Table): self.add_table(o, name=name, module=module) - + if self._is_sa_mapped(o): + self.add_table(o.__table__, name=o.__tablename__, module=module) + + def _is_sa_mapped(self, cls): + try: + sqlalchemy.orm.util.class_mapper(cls) + return True + except: + return False + def add_table(self, table_obj, name=None, module=None): if not name: # sqlalchemy 0.4 and ?? 
@@ -78,73 +92,78 @@ def add_table(self, table_obj, name=None, module=None): self.tablemap.setdefault(table_obj, {}) self.tablemap[table_obj]['name'] = name self.tablemap[table_obj]['module'] = module - + def get_real_table(self, table): return getattr(self[table]['module'], self[table]['name']) + class SQLAlchemyHandler(DataHandler): """handles genration of fixture code from a sqlalchemy data source.""" - + loadable_fxt_class = SQLAlchemyFixture - + class RecordSetAdapter(object): - """adapts a sqlalchemy record set object for use in a + """adapts a sqlalchemy record set object for use in a SQLAlchemyFixtureSet.""" columns = None + def __init__(self, obj): raise NotImplementedError("not a concrete implementation") - + def primary_key_from_instance(self, data): raise NotImplementedError - + def __init__(self, object_path, options, connection=None, **kw): from sqlalchemy import MetaData, create_engine from sqlalchemy.orm import sessionmaker, scoped_session - + self.engine = None - self.connection = connection + self.connection = connection or options.connection + super(SQLAlchemyHandler, self).__init__(object_path, options, **kw) if not self.connection: if not self.options.dsn: - raise MisconfiguredHandler( - "--dsn option is required by %s" % self.__class__) - + raise MisconfiguredHandler("--dsn option is required by %s" % self.__class__) + self.engine = create_engine(self.options.dsn) self.connection = self.engine self.meta = MetaData(bind=self.engine) ################################################ - if self.options.dsn.startswith('postgres'): + if self.options.dsn.startswith('postgres'): # postgres will put everything in a transaction, even after a commit, # and it seems that this makes it near impossible to drop tables after a test # (deadlock), so let's fix that... import psycopg2.extensions self.connection.raw_connection().set_isolation_level( - psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) + psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) ################################################ - - Session = scoped_session(sessionmaker(autoflush=True, transactional=False, bind=self.engine)) + + # create a configured "Session" class + Session = sessionmaker(bind=self.connection) + + # create a Session self.session = Session() - + self.env = TableEnv(*[self.obj.__module__] + self.options.env) - + def add_fixture_set(self, fset): t = self.env[fset.obj.table] - self.template.add_import("from %s import %s" % ( - t['module'].__name__, t['name'])) - - def begin(self, *a,**kw): - DataHandler.begin(self, *a,**kw) - + self.template.add_import("from %s import %s" % (t['module'].__name__, + t['name'])) + + def begin(self, *a, **kw): + DataHandler.begin(self, *a, **kw) + def commit(self): pass - + def rollback(self): pass - + def find(self, idval): self.rs = [self.obj.get(idval)] return self.rs - + def findall(self, query=None): """gets record set for query.""" session = self.session @@ -155,11 +174,11 @@ def findall(self, query=None): if not self.rs.count(): raise NoData("no data for query \"%s\" on %s, handler=%s" % (query, self.obj, self.__class__)) return self.rs - + @staticmethod def recognizes(object_path, obj=None): """returns True if obj is not None. - + this method is just a starting point for sqlalchemy handlers. 
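+
+        Note: get_handler() tries each handler in handler_registry in
+        registration order and uses the first one whose recognizes()
+        returns True, so more specific handlers must be registered
+        before more generic ones.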
""" if not sqlalchemy: @@ -167,110 +186,109 @@ def recognizes(object_path, obj=None): if obj is None: return False return True - + def sets(self): """yields FixtureSet for each row in SQLObject.""" - + for row in self.rs: yield SQLAlchemyFixtureSet(row, self.obj, self.connection, self.env, - adapter=self.RecordSetAdapter) + adapter=self.RecordSetAdapter) + + +def obj_lookup_table(obj): + from sqlalchemy.orm.mapper import class_mapper + mapper = class_mapper(obj) + + if obj.__table__ is not None: + table = obj.__table__ + elif mapper.local_table: + table = mapper.local_table + elif mapper.select_table: + table = mapper.select_table + else: + raise LookupError("not sure how to get a table " + "from mapper %s" % mapper) + return table + class SQLAlchemyMappedClassBase(SQLAlchemyHandler): + class RecordSetAdapter(SQLAlchemyHandler.RecordSetAdapter): def __init__(self, obj): - self.columns = obj.c - - # could grab this from the Handler : - from sqlalchemy.orm.mapper import object_mapper - self.mapper = object_mapper(obj()) - - if self.mapper.local_table: - self.table = self.mapper.local_table - elif self.mapper.select_table: - self.table = self.mapper.select_table - else: - raise LookupError( - "not sure how to get a table from mapper %s" % - self.mapper) - + self.table = obj_lookup_table(obj) + self.columns = self.table.c self.id_attr = self.table.primary_key.columns.keys() - + def primary_key_from_instance(self, data): - return self.mapper.primary_key_from_instance(data) - + return tuple(getattr(data, primary_key.name) + for primary_key in self.table.primary_key) + def __init__(self, *args, **kw): super(SQLAlchemyMappedClassBase, self).__init__(*args, **kw) - - from sqlalchemy.orm.mapper import class_mapper - self.mapper = class_mapper(self.obj) - - if self.mapper.local_table: - self.table = self.mapper.local_table - elif self.mapper.select_table: - self.table = self.mapper.select_table - else: - raise LookupError( - "not sure how to get a table from mapper %s" % - self.mapper) - - def find(self, idval): + self.table = obj_lookup_table(self.obj) + + def find(self, idval): q = self.session.query(self.obj) - primary_keys = self.table.primary_key.columns.keys() # I think this is 0.4 only + primary_keys = self.table.primary_key.columns.keys() # I think this is 0.4 only try: len(idval) except TypeError: idval = [idval] - assert len(primary_keys) == len(idval), ( - "length of idval did not match length of the table's primary keys (%s ! %s)" % ( - primary_keys, idval)) + assert len(primary_keys) == len(idval), ("length of idval did not " + "match length of the table's " + "primary keys (%s ! %s)" % + (primary_keys, idval)) table_cols = self.table.c for i, keyname in enumerate(primary_keys): q = q.filter(getattr(table_cols, keyname) == idval[i]) - + self.rs = q.all() return self.rs - + def findall(self, query=None): """gets record set for query.""" session = self.session + if query: - self.rs = session.query(self.obj).filter(query) + self.rs = session.query(self.table).filter(query).all() else: - self.rs = session.query(self.obj) - if not self.rs.count(): + self.rs = session.query(self.table).all() + if not self.rs: raise NoData("no data for query \"%s\" on %s, handler=%s" % (query, self.obj, self.__class__)) return self.rs -## NOTE: the order that handlers are registered in is important for discovering +## NOTE: the order that handlers are registered in is important for discovering ## sqlalchemy types... 
-class SQLAlchemySessionMapperHandler(SQLAlchemyMappedClassBase): + +class SQLAlchemySessionMapperHandler(SQLAlchemyMappedClassBase): """handles a scoped session mapper - + that is, one created with sqlalchemy.orm.scoped_session(sessionmaker(...)).mapper() - - """ - + + """ + @staticmethod def recognizes(object_path, obj=None): if not SQLAlchemyHandler.recognizes(object_path, obj=obj): return False - + if not SQLAlchemyMappedClassHandler.recognizes(object_path, obj=obj): return False - + # OK, so it is a mapped class - if (hasattr(obj, 'query') and - getattr(obj.query, '__module__', '').startswith('sqlalchemy')): - # sort of hoky but 0.5 proxies query and + if (hasattr(obj, 'query') and + getattr(obj.query, '__module__', '').startswith('sqlalchemy')): + # sort of hoky but 0.5 proxies query and # query.mapper so we can't check types return True - + return False - + register_handler(SQLAlchemySessionMapperHandler) -class SQLAlchemyTableHandler(SQLAlchemyHandler): + +class SQLAlchemyTableHandler(SQLAlchemyHandler): class RecordSetAdapter(SQLAlchemyHandler.RecordSetAdapter): def __init__(self, obj): self.table = obj @@ -279,18 +297,18 @@ def __init__(self, obj): if len(keys) != 1: raise ValueError("unsupported primary key type %s" % keys) self.id_attr = keys[0].key - + def primary_key_from_instance(self, data): key_str = [] for k in self.table.primary_key: - key_str.append(str(getattr(data, k.key))) + key_str.append(str(getattr(data, k.key, None))) return "_".join(key_str) - + @staticmethod def recognizes(object_path, obj=None): if not SQLAlchemyHandler.recognizes(object_path, obj=obj): return False - + from sqlalchemy.schema import Table if isinstance(obj, Table): raise NotImplementedError( @@ -298,18 +316,19 @@ def recognizes(object_path, obj=None): "Please use a mapped class or mapper object instead. Or, " "consider submitting a patch to support this.") return True - + return False - + register_handler(SQLAlchemyTableHandler) + class SQLAlchemyMappedClassHandler(SQLAlchemyMappedClassBase): - + @staticmethod def recognizes(object_path, obj=None): if not SQLAlchemyHandler.recognizes(object_path, obj=obj): return False - + from sqlalchemy.orm import class_mapper try: class_mapper(obj) @@ -318,17 +337,16 @@ def recognizes(object_path, obj=None): return False else: return True - + return False - + register_handler(SQLAlchemyMappedClassHandler) class SQLAlchemyFixtureSet(FixtureSet): """a fixture set for a sqlalchemy record set.""" - + def __init__(self, data, obj, connection, env, adapter=None): - # print data, model FixtureSet.__init__(self, data) self.env = env self.connection = connection @@ -336,23 +354,28 @@ def __init__(self, data, obj, connection, env, adapter=None): self.obj = adapter(obj) else: self.obj = obj - ## do we add table objects? elixir Entity classes get the Entity.table attribute + ## do we add table objects? 
elixir Entity classes get the
+        ## Entity.table attribute
         # if self.obj.table not in self.env:
         #     self.env.add_table(self.obj.table)
         self.primary_key = None
-        
+
         self.data_dict = {}
-        for col in self.obj.columns:
+        if getattr(self.obj, 'mapper', False):
+            columns = self.obj.mapper.columns._data
+        else:
+            columns = {col.name: col for col in self.obj.columns}
+
+        for col_name, col in six.iteritems(columns):
             sendkw = {}
             for fk in col.foreign_keys:
                 sendkw['foreign_key'] = fk
-            
-            val = self.get_col_value(col.name, **sendkw)
+            val = self.get_col_value(col_name, **sendkw)
             self.data_dict[col.name] = val
-    
+
     def attr_to_db_col(self, col):
         return col.name
-    
+
     def get_col_value(self, colname, foreign_key=None):
         """transform column name into a value or a new set
         if it's a foreign key (recursion).
@@ -362,31 +385,32 @@
             # this means that we are in a NULL column or foreign key
             # which could be perfectly legal.
             return None
-        
+
         if foreign_key:
-            from sqlalchemy.ext.assignmapper import assign_mapper
-            from sqlalchemy.ext.sqlsoup import class_for_table
-            
             table = foreign_key.column.table
-            stmt = table.select(getattr(table.c, foreign_key.column.key)==value)
+            stmt = table.select(getattr(table.c,
+                                        foreign_key.column.key) == value)
             rs = self.connection.execute(stmt)
-            
+
+            fk_value = rs.fetchone()
+            if fk_value is None:
+                return None
+
             # adapter is always table adapter here, since that's
             # how we obtain foreign keys...
-            subset = SQLAlchemyFixtureSet(
-                rs.fetchone(), table, self.connection, self.env,
-                adapter=SQLAlchemyTableHandler.RecordSetAdapter)
+            subset = SQLAlchemyFixtureSet(fk_value, table, self.connection,
+                self.env, adapter=SQLAlchemyTableHandler.RecordSetAdapter)
             return subset
-        
+
         return value
-    
+
     def get_id_attr(self):
         return self.obj.id_attr
-    
+
     def obj_id(self):
         return self.env[self.obj.table]['name']
-    
+
     def set_id(self):
         """returns id of this set (the primary key value)."""
         compid = self.obj.primary_key_from_instance(self.data)
-        return "_".join([str(i) for i in compid])
\ No newline at end of file
+        return "_".join([str(i) for i in compid])
diff --git a/fixture/dataset/dataset.py b/fixture/dataset/dataset.py
index 94b419d..f22f42b 100644
--- a/fixture/dataset/dataset.py
+++ b/fixture/dataset/dataset.py
@@ -1,18 +1,19 @@
-
 """Representations of Data
 
-The main class you will work with is :class:`DataSet` but there are a 
+The main class you will work with is :class:`DataSet` but there are a
 few variations on it: :class:`SuperSet` and :class:`MergedSuperSet`
 
 """
 
-import sys, types
+import six
+import sys
+import types
 from fixture.util import ObjRegistry
 
 class DataContainer(object):
     """
     Contains data accessible by attribute and/or key.
-    
+
     for all internally used attributes, use the inner class Meta.
     On instances, use self.meta instead.
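+
+    A minimal sketch of the dual access (values here are illustrative)::
+
+        dc = DataContainer(data={'color': 'blue'}, keys=['color'])
+        dc.color      # 'blue'
+        dc['color']   # 'blue'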
""" @@ -20,24 +21,24 @@ class DataContainer(object): class Meta: data = None keys = None - + def __init__(self, data=None, keys=None): lazy_meta(self) - if not data: + if not data: data = {} self.meta.data = data - if not keys: + if not keys: keys = [] self.meta.keys = keys - + def __contains__(self, name): """True if name is a known key""" return name in self.meta.keys - + def __getitem__(self, key): """self['foo'] returns self.meta.data['foo']""" return self.meta.data[key] - + def __getattribute__(self, name): """Attributes are always fetched first from self.meta.data[name] if possible""" # it is necessary to completely override __getattr__ @@ -48,7 +49,7 @@ def __getattribute__(self, name): return self.meta.data[name] except KeyError: raise AttributeError("%s has no attribute '%s'" % (self, name)) - + def __repr__(self): if hasattr(self, 'meta'): keys = self.meta.keys @@ -57,11 +58,11 @@ def __repr__(self): return "<%s at %s with keys %s>" % ( self.__class__.__name__, hex(id(self)), keys) - + def get(self, k, default=None): """self.meta.get(k, default)""" return self.meta.data.get(k, default) - + def _setdata(self, key, value): """Adds value to self.meta.data[key]""" if key not in self.meta.data: @@ -73,7 +74,7 @@ class RefValue(object): def __init__(self, ref, attr_name): self.attr_name = attr_name self.ref = ref - + def __repr__(self): return "<%s.%s for %s.%s.%s (%s)>" % ( Ref.__name__, self.__class__.__name__, @@ -82,16 +83,16 @@ def __repr__(self): def __get__(self, obj, type=None): """Returns the :class:`Ref` instance or a value stored in the dataset. - - The value returned depends on how this instance of :class:`RefValue` is - accessed. - - Read more about the ``__get__`` `descriptor`_ to understand how it works or read + + The value returned depends on how this instance of :class:`RefValue` is + accessed. + + Read more about the ``__get__`` `descriptor`_ to understand how it works or read some `in-depth descriptor examples`_. - + .. _descriptor: http://docs.python.org/ref/descriptors.html .. _in-depth descriptor examples: http://users.rcn.com/python/download/Descriptor.htm - + """ if obj is None: # self was assigned to a class object @@ -110,43 +111,43 @@ def __get__(self, obj, type=None): class Ref(object): """A reference to a row in a DataSet class. - + An instance of this class is accessible on the inner class (a row) in a :class:`DataSet` as :class:`Row.ref() ` - - This allows a DataSet to reference an id column of a "foreign key" DataSet + + This allows a DataSet to reference an id column of a "foreign key" DataSet before it exists. - - Ref is a Descriptor containing a deferred value to an attribute of a data - object (like an instance of a SQLAlchemy mapped class). It provides the - DataSet a way to cloak the fact that "id" is an attribute only populated - after said data object is saved to the database. In other words, the - DataSet doesn't know or care when it has been loaded or not. It thinks it - is referencing "id" all the same. The timing of when id is accessed is + + Ref is a Descriptor containing a deferred value to an attribute of a data + object (like an instance of a SQLAlchemy mapped class). It provides the + DataSet a way to cloak the fact that "id" is an attribute only populated + after said data object is saved to the database. In other words, the + DataSet doesn't know or care when it has been loaded or not. It thinks it + is referencing "id" all the same. The timing of when id is accessed is handled by the LoadableFixture. 
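+
+    A short sketch of how such a reference is typically declared inside a
+    DataSet (class and column names here are illustrative)::
+
+        class AuthorData(DataSet):
+            class frank:
+                name = "Frank Herbert"
+
+        class BookData(DataSet):
+            class dune:
+                title = "Dune"
+                author_id = AuthorData.frank.ref('id')
+
+    ``author_id`` stays deferred until ``frank`` is actually saved, at
+    which point the reference resolves to the real primary key value.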
- + """ Value = RefValue - + def __init__(self, dataset_class, row): self.dataset_class = dataset_class self.dataset_obj = None self.row = row # i.e. the name of the row class... self.key = self.row.__name__ - + def __call__(self, ref_name): """Return a :class:`RefValue` instance for ref_name""" return self.Value(self, ref_name) - + def __repr__(self): return "<%s to %s.%s at %s>" % ( - self.__class__.__name__, self.dataset_class.__name__, + self.__class__.__name__, self.dataset_class.__name__, self.row.__name__, hex(id(self))) def is_row_class(attr): attr_type = type(attr) - return ((attr_type==types.ClassType or attr_type==type) and - attr.__name__ != 'Meta' and + return ((issubclass(attr_type, six.class_types) or attr_type==type) and + attr.__name__ != 'Meta' and not issubclass(attr, DataContainer.Meta)) class DataType(type): @@ -154,75 +155,75 @@ class DataType(type): Meta class for creating :class:`DataSet` classes. """ default_primary_key = ['id'] - + def __init__(cls, name, bases, cls_attr): super(DataType, cls).__init__(name, bases, dict) - + if 'Meta' in cls_attr and hasattr(cls_attr['Meta'], 'primary_key'): cls_attr['_primary_key'] = cls_attr['Meta'].primary_key else: cls_attr['_primary_key'] = cls.default_primary_key - + # just like dir(), we should do this in alpha order : ## NOTE: dropping support for <2.4 here... for name in sorted(cls_attr.keys()): attr = cls_attr[name] if is_row_class(attr): cls.decorate_row(attr, name, bases, cls_attr) - + del cls_attr['_primary_key'] - + def decorate_row(cls, row, name, bases, cls_attr): """Each row (an inner class) assigned to a :class:`DataSet` will be customized after it is created. - + This is because it's easier to type:: - + class MyData(DataSet): class foo: col1 = "bz" col2 = "bx" - + ... than it is to type: - + class MyData(DataSet): class foo(Row): col1 = "bz" col2 = "bx" - + (Note the subclassing that would be required in inner classes without this behavior.) - + But more importantly, rows must be able to inherit from other rows, like:: - + class MyData(DataSet): class joe: first_name = "Joe" last_name = "Phelps" class joe_gibbs(joe): last_name = "Gibbs" - + Here is what happens to each inner class object as it is assigned to a :class:`DataSet`: - + 1. A ``Row._dataset`` property is added which is a reference to the :class:`DataSet` instance. 2. A ``Row.ref()`` property (instance of :class:`Ref`) is added - 3. Any database primary key inherited from another Row is de-referenced - since primary keys must be unique per row. See :ref:`Using Dataset ` for an + 3. Any database primary key inherited from another Row is de-referenced + since primary keys must be unique per row. See :ref:`Using Dataset ` for an example of referencing primary key values that may or may not exist yet. - - + + """ # store a backref to the container dataset row._dataset = cls - + # bind a ref method row.ref = Ref(cls, row) - + # fix inherited primary keys names_to_uninherit = [] for name in dir(row): if name in cls_attr['_primary_key']: if name not in row.__dict__: - # then this was an inherited value, so we need to nullify it - # without 1) disturbing the other inherited values and 2) + # then this was an inherited value, so we need to nullify it + # without 1) disturbing the other inherited values and 2) # disturbing the inherited class. is this nuts? 
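+                    # e.g. if row joe defines id=1 and row joe_gibbs(joe)
+                    # does not set its own id, joe_gibbs must not keep the
+                    # inherited id=1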
names_to_uninherit.append(name) bases_to_replace = [] @@ -238,22 +239,21 @@ class joe_gibbs(joe): new_bases = [b for b in row.__bases__] for base_c, base_pos in bases_to_replace: # this may not work if the row's base was a new-style class - new_base = types.ClassType( - base_c.__name__, base_c.__bases__, + new_base = type(base_c.__name__, base_c.__bases__, dict([(k, getattr(base_c, k)) for k in dir(base_c) \ if not k.startswith('_') and \ k not in names_to_uninherit])) new_bases[base_pos] = new_base if new_bases: row.__bases__ = tuple(new_bases) - + def is_rowlike(candidate): """returns True if candidate is *like* a DataRow. - + Not to be confused with issubclass(candidate, DataRow). - - A regular or new-style class is row-like because DataSet objects allow any + + A regular or new-style class is row-like because DataSet objects allow any type of class to declare a row of data """ return hasattr(candidate, '_dataset') and type(candidate._dataset) in ( @@ -264,16 +264,16 @@ class DataRow(object): a DataSet row, values accessible by attibute or key. """ _reserved_attr = ('columns',) - + def __init__(self, dataset): object.__setattr__(self, '_dataset', dataset) # i.e. the name of the row class... object.__setattr__(self, '_key', self.__class__.__name__) - + def __getitem__(self, item): """self['foo'] works the same as self.foo""" return getattr(self, item) - + def __getattr__(self, name): """Undefined attributes are fetched from the actual data object stored for this row.""" # an undefined data attribute was referenced, @@ -282,13 +282,13 @@ def __getattr__(self, name): # created only after load if name.startswith('_'): return object.__getattribute__(self, name) - + obj = self._dataset.meta._stored_objects.get_object(self._key) return getattr(obj, name) - + @classmethod def columns(self): - """Classmethod that yields all attribute names (except reserved attributes) + """Classmethod that yields all attribute names (except reserved attributes) in alphabetical order """ for k in dir(self): @@ -302,27 +302,27 @@ def __init__(self, dataset): list.__init__(self) self.dataset = dataset self._ds_key_map = {} - + def get_object(self, key): """returns the object at this key. - + In this example... - + >>> class EventData(DataSet): ... class click: ... id=1 - - ...the key is "click." The object returned would be an adapter for + + ...the key is "click." The object returned would be an adapter for EventData, probably an Event object - + """ try: return self[ self._ds_key_map[key] ] except (IndexError, KeyError): - etype, val, tb = sys.exc_info() + etype, val, _ = sys.exc_info() raise etype("row '%s' hasn't been loaded for %s (loaded: %s)" % ( - key, self.dataset, self)), None, tb - + key, self.dataset, self)) + def store(self, key, obj): self.append(obj) pos = len(self)-1 @@ -333,31 +333,31 @@ def store(self, key, obj): class DataSetMeta(DataContainer.Meta): """ Configures a DataSet class. - - When defining a :class:`DataSet` class, declare this as ``DataSet.Meta`` to configure the ``DataSet``. + + When defining a :class:`DataSet` class, declare this as ``DataSet.Meta`` to configure the ``DataSet``. The following are acknowledged attributes: ``storable`` - an object that should be used to store this :class:`DataSet`. If omitted the - loader's :class:`Style ` object will look for a storable object in its env, + an object that should be used to store this :class:`DataSet`. 
If omitted the + loader's :class:`Style ` object will look for a storable object in its env, using ``storable_name`` ``storable_name`` - the name of the storable object that the loader should fetch from - its env to load this ``DataSet`` with. If omitted, the loader's style - object will try to guess the storable_name based on its env and the + the name of the storable object that the loader should fetch from + its env to load this ``DataSet`` with. If omitted, the loader's style + object will try to guess the storable_name based on its env and the name of the ``DataSet`` class ``primary_key`` - this is a list of names that should be acknowledged as primary keys + this is a list of names that should be acknowledged as primary keys in a ``DataSet``. The default is simply ``['id']``. - - Here is an example of using an inner ``Meta`` class to specify a custom + + Here is an example of using an inner ``Meta`` class to specify a custom storable object to be used when storing a :class:`DataSet`:: - + >>> class RecipeStore(object): ... '''pretend this knows how to save recipes''' - ... + ... >>> class Recipes(DataSet): ... class Meta: ... storable = RecipeStore @@ -366,8 +366,8 @@ class DataSetMeta(DataContainer.Meta): ... name = "Clam Chowder" ... class tomato_bisque(chowder): ... name = "Tomato Bisque" - ... - + ... + """ row = DataRow storable = None @@ -378,66 +378,65 @@ class DataSetMeta(DataContainer.Meta): _stored_objects = None _built = False -class DataSet(DataContainer): +class DataSet(six.with_metaclass(DataType, DataContainer)): """ Defines data to be loaded - - A loader will typically want to load a dataset into a - single storage medium. I.E. a table in a database. - + + A loader will typically want to load a dataset into a + single storage medium. I.E. a table in a database. + For a complete example see :ref:`Using DataSet `. - - Note that rows are always classes until the dataset instance has been + + Note that rows are always classes until the dataset instance has been loaded:: - + >>> class Flowers(DataSet): ... class violets: ... color = 'blue' ... class roses: ... color = 'red' - ... + ... >>> f = Flowers() >>> f.violets.color 'blue' - + See :class:`DataType` for info on how inner classes are constructed. - - Row values can also be inherited from other rows, just as normal inheritance - works in Python. See the ``primary_key`` :class:`Meta ` attribute for how + + Row values can also be inherited from other rows, just as normal inheritance + works in Python. See the ``primary_key`` :class:`Meta ` attribute for how inheritance works on primary keys:: - + >>> class Recipes(DataSet): ... class chowder: ... is_soup = True ... name = "Clam Chowder" ... class tomato_bisque(chowder): ... name = "Tomato Bisque" - ... + ... >>> r = Recipes() >>> r.chowder.is_soup True >>> r.tomato_bisque.is_soup True - + Keyword Arguments: - + default_refclass A :class:`SuperSet` to use if None has already been specified in ``Meta`` - + See :class:`DataSetMeta` for details about the special inner ``Meta`` class - + See :ref:`Using Dataset ` for more examples of usage. - + """ - __metaclass__ = DataType _reserved_attr = DataContainer._reserved_attr + ('data', 'shared_instance') ref = None Meta = DataSetMeta - + def __init__(self, default_refclass=None, default_meta=None): DataContainer.__init__(self) - - # we want the convenience of not having to + + # we want the convenience of not having to # inherit DataSet.Meta. hmmm ... 
if not default_meta: default_meta = DataSet.Meta @@ -446,18 +445,18 @@ def __init__(self, default_refclass=None, default_meta=None): for name in dir(defaults): if not hasattr(self.meta, name): setattr(self.meta, name, getattr(defaults, name)) - + self.meta._stored_objects = DataSetStore(self) - # dereference from class ... + # dereference from class ... try: cl_attr = getattr(self.Meta, 'references') except AttributeError: cl_attr = [] setattr(self.meta, 'references', [c for c in cl_attr]) - + if not default_refclass: default_refclass = SuperSet - + def mkref(): clean_refs = [] for ds in iter(self.meta.references): @@ -466,48 +465,48 @@ def mkref(): continue clean_refs.append(ds) self.meta.references = clean_refs - + return default_refclass(*[ - ds.shared_instance(default_refclass=default_refclass) + ds.shared_instance(default_refclass=default_refclass) for ds in iter(self.meta.references) ]) - + # data def style classes, so they have refs before data is walked if len(self.meta.references) > 0: self.ref = mkref() - + for key, data in self.data(): if key in self: raise ValueError( "data() cannot redeclare key '%s' " "(this is already an attribute)" % key) - + if isinstance(data, dict): # make a new class object for the row data # so that a loaded dataset can instantiate this... data = type(key, (self.meta.row,), data) self._setdata(key, data) - + if not self.ref: # type style classes, since refs were discovered above self.ref = mkref() - + def __iter__(self): """yields keys of self.meta""" for key in self.meta.keys: yield (key, getattr(self, key)) - + def data(self): """returns iterable key/dict pairs. - + .. note:: If possible, use attribute-style definition of rows and columns instead (explained above) - - You would only need to override this if you have a DataSet that will - break unless it is ordered very specifically. Since class-style DataSet - objects are just classes with attributes, its rows will be loaded in - alphabetical order. The alternative is to define a DataSet as follows. + + You would only need to override this if you have a DataSet that will + break unless it is ordered very specifically. Since class-style DataSet + objects are just classes with attributes, its rows will be loaded in + alphabetical order. The alternative is to define a DataSet as follows. However, note that this is not as functional as a class-style DataSet:: - + >>> class Birds(DataSet): ... def data(self): ... return ( @@ -515,46 +514,46 @@ def data(self): ... name="Blue Jay")), ... ('crow', dict( ... name="Crow")),) - ... + ... 
>>> b = Birds() >>> b.blue_jay.name 'Blue Jay' >>> b.crow.name 'Crow' - + """ if self.meta._built: for k,v in self: yield (k,v) - + def public_dir(obj): for name in dir(obj): if name.startswith("_"): continue yield name - + def add_ref_from_rowlike(rowlike): if rowlike._dataset not in self.meta.references: self.meta.references.append(rowlike._dataset) - + empty = True for name in public_dir(self.__class__): val = getattr(self.__class__, name) if not is_row_class(val): continue - + empty = False key = name row_class = val row = {} - + for col_name in public_dir(row_class): col_val = getattr(row_class, col_name) - + if isinstance(col_val, Ref): # the .ref attribute continue - elif type(col_val) in (types.ListType, types.TupleType): + elif type(col_val) in (list, tuple): for c in col_val: if is_rowlike(c): add_ref_from_rowlike(c) @@ -562,8 +561,8 @@ def add_ref_from_rowlike(rowlike): # could definitely break any other storage mediums # ListProperty supports quite a few more types than these # see appengine.ext.db._ALLOWED_PROPERTY_TYPES - elif type(c) in (types.StringType, types.UnicodeType, types.BooleanType, - types.FloatType, types.IntType): + elif type(c) in six.string_types + six.integer_types + ( + bool, float): continue else: raise TypeError( @@ -577,18 +576,18 @@ def add_ref_from_rowlike(rowlike): if ref.dataset_class not in self.meta.references: # store the reference: self.meta.references.append(ref.dataset_class) - + row[col_name] = col_val yield (key, row) - + if empty: raise ValueError("cannot create an empty DataSet") self.meta._built = True - + @classmethod def shared_instance(cls, **kw): """Returns or creates the singleton instance for this :class:`DataSet` class""" - # fixme: default_refclass might be in **kw. But only a loader can set a + # fixme: default_refclass might be in **kw. But only a loader can set a # refclass. hmm if cls in dataset_registry: dataset = dataset_registry[cls] @@ -600,91 +599,91 @@ def shared_instance(cls, **kw): class DataSetContainer(object): """ A ``DataSet`` of :class:`DataSet` classes - + yields :class:`DataSet` classes when itered over. """ class Meta: datasets = None dataset_keys = None - + def __init__(self): lazy_meta(self) self.meta.datasets = {} self.meta.dataset_keys = [] self.meta._cache = ObjRegistry() - + def __iter__(self): """yields dataset keys""" for k in self.meta.dataset_keys: yield self.meta.datasets[k] - + def _dataset_to_key(self, dataset): """Returns a key for dataset (the name of the DataSet subclass)""" return dataset.__class__.__name__ - + def _setdataset(self, dataset, key=None, isref=False): """sets a dataset in this container. - + Returns False if DataSet has already been added and does nothing. Otherwise adds the DataSet and returns True. """ # due to reference resolution we might get colliding data sets... if dataset in self.meta._cache: return False - + if key is None: key = self._dataset_to_key(dataset) if not isref: # refs are not yielded self.meta.dataset_keys.append(key) - + self.meta.datasets[key] = dataset - + self.meta._cache.register(dataset) return True class SuperSet(DataContainer, DataSetContainer): """ A set of :class:`DataSet` classes. - + each attribute / key is a :class:`DataSet` instance. - + For example:: - + >>> from fixture import DataSet >>> from fixture.dataset import SuperSet >>> class RecipeData(DataSet): ... class tomato_bisque: ... name = "Tomato Bisque" - ... + ... >>> class CookwareData(DataSet): ... class pots: ... type = "cast-iron" - ... + ... 
>>> s = SuperSet(RecipeData(), CookwareData()) - + Now each instance is available by class name:: - + >>> s.RecipeData.tomato_bisque.name 'Tomato Bisque' >>> s.CookwareData.pots.type 'cast-iron' - + """ class Meta(DataContainer.Meta, DataSetContainer.Meta): pass - + def __init__(self, *datasets): DataContainer.__init__(self) DataSetContainer.__init__(self) self._store_datasets(datasets) - + def _store_datasets(self, datasets): for d in datasets: k = self._dataset_to_key(d) self._setdata(k, d) self._setdataset(d, key=k) - + for ref_d in d.ref: k = self._dataset_to_key(ref_d) self._setdata(k, ref_d) @@ -693,31 +692,31 @@ def _store_datasets(self, datasets): class MergedSuperSet(SuperSet): """ A collection of :class:`DataSet` instances. - - all attributes of all :class:`DataSet` classes are merged together so that they are + + all attributes of all :class:`DataSet` classes are merged together so that they are accessible in this class. Duplicate attribute names are not allowed. - + For example:: - + >>> from fixture import DataSet >>> from fixture.dataset import MergedSuperSet >>> class RecipeData(DataSet): ... class tomato_bisque: ... name = "Tomato Bisque" - ... + ... >>> class CookwareData(DataSet): ... class pots: ... type = "cast-iron" - ... + ... >>> m = MergedSuperSet(RecipeData(), CookwareData()) - + Now the rows of each ``DataSet`` are available as if they were rows of the ``MergedSuperSet``:: - + >>> m.tomato_bisque.name 'Tomato Bisque' >>> m.pots.type 'cast-iron' - + """ class Meta(SuperSet.Meta): pass @@ -725,7 +724,7 @@ def __init__(self, *datasets): lazy_meta(self) self.meta.keys_to_datasets = {} SuperSet.__init__(self, *datasets) - + def _setdataset(self, dataset, key=None, isref=False): if SuperSet._setdataset(self, dataset, key=key, isref=isref): for k,row in dataset: @@ -734,20 +733,20 @@ def _setdataset(self, dataset, key=None, isref=False): "cannot add key '%s' for %s because it was " "already added by %s" % ( k, dataset, self.meta.keys_to_datasets[k])) - + # need an instance here, if it's a class... 
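            # rows declared attribute-style are still plain classes at this
            # point; instantiating them bound to their source dataset keeps
            # merged attribute access working after the merge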
if not isinstance(row, DataRow): row = row(dataset) self._setdata(k, row) - self.meta.keys_to_datasets[k] = dataset - + self.meta.keys_to_datasets[k] = dataset + def _store_datasets(self, datasets): for dataset in datasets: self._setdataset(dataset) - + for d in dataset.ref: self._setdataset(d, isref=True) - + def lazy_meta(obj): if not hasattr(obj, 'meta'): @@ -756,4 +755,4 @@ def lazy_meta(obj): if __name__ == '__main__': import doctest doctest.testmod() - + diff --git a/fixture/docs.py b/fixture/docs.py index 2f8d9dc..fa8e007 100644 --- a/fixture/docs.py +++ b/fixture/docs.py @@ -113,7 +113,7 @@ def unquot(s): try: try: main(cmdlist[1:]) - except SystemExit, e: + except SystemExit as e: returncode = e.code else: returncode = 0 @@ -188,7 +188,7 @@ def setup_command_data(): metadata.create_all() orm.mapper(Book, books) orm.mapper(Author, authors, properties={'books': orm.relation(Book, backref='author')}) - Session = orm.sessionmaker(bind=metadata.bind, autoflush=True, transactional=True) + Session = orm.sessionmaker(bind=metadata.bind, autoflush=True) session = Session() frank = Author() @@ -203,4 +203,4 @@ def setup_command_data(): session.commit() - \ No newline at end of file + diff --git a/fixture/examples/django_example/blog/templatetags/blog.py b/fixture/examples/django_example/blog/templatetags/blog.py index e80918c..9633adf 100644 --- a/fixture/examples/django_example/blog/templatetags/blog.py +++ b/fixture/examples/django_example/blog/templatetags/blog.py @@ -39,10 +39,10 @@ def get_latest_posts(parser, token): try: tag_name, arg = token.contents.split(None, 1) except ValueError: - raise template.TemplateSyntaxError, "%s tag requires arguments" % token.contents.split()[0] + raise template.TemplateSyntaxError("%s tag requires arguments" % token.contents.split()[0]) m = re.search(r'(.*?) as (\w+)', arg) if not m: - raise template.TemplateSyntaxError, "%s tag had invalid arguments" % tag_name + raise template.TemplateSyntaxError("%s tag had invalid arguments" % tag_name) format_string, var_name = m.groups() return LatestPosts(format_string, var_name) @@ -72,10 +72,10 @@ def get_blog_categories(parser, token): try: tag_name, arg = token.contents.split(None, 1) except ValueError: - raise template.TemplateSyntaxError, "%s tag requires arguments" % token.contents.split()[0] + raise template.TemplateSyntaxError("%s tag requires arguments" % token.contents.split()[0]) m = re.search(r'as (\w+)', arg) if not m: - raise template.TemplateSyntaxError, "%s tag had invalid arguments" % tag_name + raise template.TemplateSyntaxError("%s tag had invalid arguments" % tag_name) var_name = m.groups()[0] return BlogCategories(var_name) @@ -94,10 +94,14 @@ def get_links(value): try: from BeautifulSoup import BeautifulSoup except ImportError: - from beautifulsoup import BeautifulSoup + try: + from beautifulsoup import BeautifulSoup + except: + from bs4 import BeautifulSoup + soup = BeautifulSoup(value) return soup.findAll('a') except ImportError: if settings.DEBUG: - raise template.TemplateSyntaxError, "Error in 'get_links' filter: BeautifulSoup isn't installed." 
+ raise template.TemplateSyntaxError("Error in 'get_links' filter: BeautifulSoup isn't installed.") return value diff --git a/fixture/examples/django_example/manage.py b/fixture/examples/django_example/manage.py index ce2e32c..f76832b 100755 --- a/fixture/examples/django_example/manage.py +++ b/fixture/examples/django_example/manage.py @@ -1,17 +1,17 @@ #!/usr/bin/env python +from __future__ import absolute_import + import sys import os + sys.path.append(os.path.dirname(__file__)) -from django.core.management import execute_manager -try: - import settings # Assumed to be in the same directory. -except ImportError: - import sys - sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) - sys.exit(1) + +from django.core.management import execute_from_command_line + def main(): - execute_manager(settings) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings") + execute_from_command_line(sys.argv) if __name__ == "__main__": main() diff --git a/fixture/examples/django_example/settings.py b/fixture/examples/django_example/settings.py index fea7e68..8078218 100644 --- a/fixture/examples/django_example/settings.py +++ b/fixture/examples/django_example/settings.py @@ -9,16 +9,13 @@ MANAGERS = ADMINS +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.path.join(os.path.dirname(__file__), 'project.db'), + } -DATABASE_ENGINE = 'sqlite3' -DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'project.db') -# MY POSTGRES SETTINGS -#DATABASE_ENGINE = 'postgresql_psycopg2' -#DATABASE_NAME = 'fixture' # -#DATABASE_USER = 'ben' # Not used with sqlite3. -DATABASE_PASSWORD = '' # Not used with sqlite3. -DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3. -DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3. +} # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name diff --git a/fixture/io.py b/fixture/io.py index 16d90e4..bac719d 100644 --- a/fixture/io.py +++ b/fixture/io.py @@ -2,7 +2,7 @@ """Working with temporary file systems. See :ref:`Using TempIO ` for examples. - + """ __all__ = ['TempIO'] @@ -17,23 +17,23 @@ def TempIO(deferred=False, **kw): """self-destructing, temporary directory. - - Takes the same keyword args as tempfile.mkdtemp with these additional + + Takes the same keyword args as tempfile.mkdtemp with these additional keywords: - + ``deferred`` - If True, destruction will be put off until atexit. Otherwise, + If True, destruction will be put off until atexit. Otherwise, it will be destructed when it falls out of scope - + Returns an instance of :class:`DeletableDirPath` - + """ - # note that this can't be a subclass because str is silly and doesn't let + # note that this can't be a subclass because str is silly and doesn't let # you override its constructor (at least in 2.4) if not 'prefix' in kw: # a breadcrumb ... 
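        # a recognizable prefix leaves a breadcrumb: if a hard crash ever
        # skips the atexit cleanup, orphaned directories are easy to spot
        # and remove by hand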
kw['prefix'] = 'tmp_fixture_' - + tmp_path = path.realpath(mkdtemp(**kw)) root = DeletableDirPath(tmp_path) root._deferred = deferred @@ -45,7 +45,7 @@ def _expunge(tmpdir): if path_exists(tmpdir): import shutil shutil.rmtree(tmpdir) - + def _expunge_all(): """exit function to remove all registered tmp dirs.""" if _tmpdirs is None: @@ -53,20 +53,20 @@ def _expunge_all(): for d in _tmpdirs: _expunge(d) - + # this seems to be a safer way to clean up since __del__ can # be called in an unpredictable environment : atexit.register(_expunge_all) def mkdirall(path, mkdir=os.mkdir): """walks the path and makes any non-existant dirs. - + optional keyword `mkdir` is the callback for making a single dir - + """ - if path[-1] == os.path.sep: + if path[-1] == os.path.sep: path = path[0:-len(os.path.sep)] # trailing slash confused exists() - + root = (path[0] == os.path.sep) and os.path.sep or '' paths = split(path)[0].split(os.path.sep) if len(paths): @@ -78,15 +78,15 @@ def mkdirall(path, mkdir=os.mkdir): abs = join(root, accum) if not path_exists(abs): mkdir(abs) - + mkdir(path) def putfile(filename, contents, filelike=None, mode=None): """opens filename in writing mode, writes contents and closes. - + if filelike is None then it will be created with open() and the prefixed path will be walked to create non-existant dirs. - + """ if mode is None: mode = 'w' @@ -95,84 +95,84 @@ def putfile(filename, contents, filelike=None, mode=None): if parent and not path_exists(parent): mkdirall(parent) filelike = open(filename, mode) - + filelike.write(contents) filelike.close() - + class DirPath(str): """ A directory path. - - The instance will function exactly like a string but is enhanced with a few - common methods from os.path. Note that path.split() is implemented as - self.splitpath() since otherwise paths may not work right in other + + The instance will function exactly like a string but is enhanced with a few + common methods from os.path. Note that path.split() is implemented as + self.splitpath() since otherwise paths may not work right in other applications (conflicts with ``str.split()``). - + """ ## note: str.__init__() cannot be called due to builtin weirdness (warning in 2.6+) - + def __setattr__(self, name, val): - """self.new_directory = "rel/path/to/directory" - - a new attribute will be created as a relative directory and the value + """self.new_directory = "rel/path/to/directory" + + a new attribute will be created as a relative directory and the value will be stored as a new DirPath object. - + """ if not name.startswith('_'): path = self.mkdir(val) val = self._wrap(path) object.__setattr__(self, name, val) - + def _wrap(self, path): return DirPath(path) - + def abspath(self): """``os.path.abspath(self)``""" return self._wrap(path.abspath(self)) - + def basename(self): """``os.path.basename(self)``""" return self._wrap(path.basename(self)) - + def dirname(self): """``os.path.dirname(self)``""" return self._wrap(path.dirname(self)) - + def exists(self): """``os.path.exists(self)``""" return path_exists(self) - + def join(self, *dirs): """``os.path.join(self, *dirs)``""" return self._wrap(path.join(self, *dirs)) - + def mkdir(self, name): """makes a directory in the root and returns its full path. - - the path is split each non-existant directory is made. + + the path is split each non-existant directory is made. returns full path to new directory. - + """ # force it into an relative path... 
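        # strip any leading separator; os.path.join() would otherwise treat
        # the name as absolute and escape this directory entirely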
if name.startswith(os.path.sep): name = name[len(os.path.sep):] - + path = self.join(name) mkdirall(path) return path - + def normpath(self): """``os.path.normpath(self)``""" return self._wrap(path.normpath(self)) - + def putfile(self, fname, contents, mode=None): """puts new filename relative to your :func:`TempIO` root. Makes all directories along the path to the final file. - - The fname argument can be a complete path, but must not start with a - slash. Any missing directories will be created relative to the :func:`TempIO` + + The fname argument can be a complete path, but must not start with a + slash. Any missing directories will be created relative to the :func:`TempIO` root - + returns absolute filename. """ relpath, fname = split(fname) @@ -183,19 +183,19 @@ def putfile(self, fname, contents, mode=None): "currently '%s')" % (relpath, self)) if relpath and not self.join(relpath).exists(): self.mkdir(relpath) - + f = self.join(relpath, fname) putfile(f, contents, mode=mode) return f - + def realpath(self): """``os.path.realpath(self)``""" return self._wrap(path.realpath(self)) - + def splitext(self): """``os.path.splitext(self)``""" return path.splitext(self) - + def splitpath(self): """``os.path.split(self)`` """ @@ -204,11 +204,11 @@ def splitpath(self): class DeletableDirPath(DirPath): """ A temporary directory path. - + That is, one that can be deleted. - + .. note:: Use the :func:`TempIO` function to create an instance - + """ def __del__(self): """ @@ -224,11 +224,11 @@ def __del__(self): # due to the unpredictable state of python's destructors; there is # nothing really to do pass - + def rmtree(self): """forcefully removes the root directory and everything under it. - - This can be trusted more than :meth:`del self ` because it is guaranteed to + + This can be trusted more than :meth:`del self ` because it is guaranteed to remove the directory tree. 
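        A minimal sketch of the difference::

            tmp = TempIO(deferred=True)   # would normally wait for atexit
            tmp.putfile('build/out.txt', 'data')
            tmp.rmtree()                  # removed right now, guaranteed
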
""" _expunge(self) @@ -236,4 +236,4 @@ def rmtree(self): if __name__ == '__main__': import doctest doctest.testmod() - + diff --git a/fixture/loadable/__init__.py b/fixture/loadable/__init__.py index b999c96..4945710 100644 --- a/fixture/loadable/__init__.py +++ b/fixture/loadable/__init__.py @@ -1,14 +1,19 @@ """Loadable fixture components""" +from __future__ import absolute_import + __all__ = ['SQLAlchemyFixture', 'SQLObjectFixture', 'GoogleDatastoreFixture', - 'DjangoFixture', 'StormFixture'] -import loadable + 'DjangoFixture', 'StormFixture', 'DBLoadableFixture', + 'EnvLoadableFixture', 'LoadableFixture'] + +from fixture.loadable import loadable __doc__ = loadable.__doc__ -from loadable import * -from sqlalchemy_loadable import SQLAlchemyFixture -from sqlobject_loadable import SQLObjectFixture -from google_datastore_loadable import GoogleDatastoreFixture -from django_loadable import DjangoFixture -from storm_loadable import StormFixture + +from .loadable import LoadableFixture, DBLoadableFixture, EnvLoadableFixture +from .sqlalchemy_loadable import SQLAlchemyFixture +from .sqlobject_loadable import SQLObjectFixture +from .google_datastore_loadable import GoogleDatastoreFixture +from .django_loadable import DjangoFixture +from .storm_loadable import StormFixture diff --git a/fixture/loadable/django_loadable.py b/fixture/loadable/django_loadable.py index adaf24d..bd872fa 100644 --- a/fixture/loadable/django_loadable.py +++ b/fixture/loadable/django_loadable.py @@ -176,7 +176,7 @@ def then_finally(self, unloading=False): from django.db import transaction try: self.transaction.leave_transaction_management() - except transaction.TransactionManagementError, e: + except transaction.TransactionManagementError as e: raise def attach_storage_medium(self, ds): @@ -271,4 +271,4 @@ def get(name, default=None): else: return get_model(app_label, model_name) - \ No newline at end of file + diff --git a/fixture/loadable/loadable.py b/fixture/loadable/loadable.py index 5664888..301d281 100644 --- a/fixture/loadable/loadable.py +++ b/fixture/loadable/loadable.py @@ -24,19 +24,19 @@ def __init__(self, medium, dataset): self.medium = medium self.dataset = dataset self.transaction = None - + def __getattr__(self, name): return getattr(self.obj, name) - + def __repr__(self): return "%s at %s for %s" % ( self.__class__.__name__, hex(id(self)), self.medium) - + def clear(self, obj): """Must clear the stored object. """ raise NotImplementedError - + def clearall(self): """Must clear all stored objects. """ @@ -44,49 +44,48 @@ def clearall(self): for obj in self.dataset.meta._stored_objects: try: self.clear(obj) - except Exception, e: - etype, val, tb = sys.exc_info() - raise UnloadError(etype, val, self.dataset, - stored_object=obj), None, tb - + except Exception as e: + etype, val, _ = sys.exc_info() + raise UnloadError(etype, val, self.dataset, stored_object=obj) + def save(self, row, column_vals): """Given a DataRow, must save it somehow. - + column_vals is an iterable of (column_name, column_value) """ raise NotImplementedError - + def visit_loader(self, loader): """A chance to visit the LoadableFixture object. - + By default it does nothing. """ pass class LoadQueue(ObjRegistry): """Keeps track of what class instances were loaded. - + "level" is used like so: - - The lower the level, the lower that object is on the foreign key chain. - As the level increases, this means more foreign objects depend on the - local object. Thus, objects need to be unloaded starting at the lowest - level and working up. 
Also, since objects can appear multiple times in - foreign key chains, the queue only acknowledges the object at its - highest level, since this will ensure all dependencies get unloaded - before it. - + + The lower the level, the lower that object is on the foreign key chain. + As the level increases, this means more foreign objects depend on the + local object. Thus, objects need to be unloaded starting at the lowest + level and working up. Also, since objects can appear multiple times in + foreign key chains, the queue only acknowledges the object at its + highest level, since this will ensure all dependencies get unloaded + before it. + """ def __init__(self): ObjRegistry.__init__(self) self.tree = {} self.limit = {} - + def __repr__(self): return "<%s at %s>" % ( self.__class__.__name__, hex(id(self))) - + def _pushid(self, id, level): if id in self.limit: # only store the object at its highest level: @@ -98,67 +97,66 @@ def _pushid(self, id, level): self.tree.setdefault(level, []) self.tree[level].append(id) self.limit[id] = level - + def clear(self): """clear internal registry""" ObjRegistry.clear(self) # this is an attempt to free up refs to database connections: self.tree = {} self.limit = {} - + def register(self, obj, level): """register this object as "loaded" at level """ id = ObjRegistry.register(self, obj) self._pushid(id, level) return id - + def referenced(self, obj, level): """tell the queue that this object was referenced again at level. """ id = self.id(obj) self._pushid(id, level) - + def to_unload(self): """yields a list of objects in an order suitable for unloading. """ level_nums = self.tree.keys() - level_nums.sort() treelog.info("*** unload order ***") - for level in level_nums: + for level in sorted(level_nums): unload_queue = self.tree[level] verbose_obj = [] - + for id in unload_queue: obj = self.registry[id] verbose_obj.append(obj.__class__.__name__) yield obj - + treelog.info("%s. %s", level, verbose_obj) - + class LoadableFixture(Fixture): """ knows how to load data into something useful. - - This is an abstract class and cannot be used directly. You can use a - LoadableFixture that already knows how to load into a specific medium, - such as SQLAlchemyFixture, or create your own to build your own to load + + This is an abstract class and cannot be used directly. You can use a + LoadableFixture that already knows how to load into a specific medium, + such as SQLAlchemyFixture, or create your own to build your own to load DataSet objects into custom storage media. 
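    A custom loader only needs to fill in the abstract hooks. Roughly (a
    sketch; ``DictMedium`` and ``storage`` are hypothetical)::

        class InMemoryFixture(LoadableFixture):
            Medium = DictMedium  # a StorageMediumAdapter subclass
            def attach_storage_medium(self, ds):
                ds.meta.storage_medium = self.Medium(storage, ds)
            def commit(self):
                pass  # nothing transactional to finish
            def rollback(self):
                pass
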
Keyword Arguments: - + dataclass class to instantiate with datasets (defaults to that of Fixture) style a Style object to translate names with (defaults to NamedDataStyle) medium - optional LoadableFixture.StorageMediumAdapter to store DataSet + optional LoadableFixture.StorageMediumAdapter to store DataSet objects with - + """ style = OriginalStyle() dataclass = Fixture.dataclass - + def __init__(self, style=None, medium=None, **kw): Fixture.__init__(self, loader=self, **kw) if style: @@ -166,61 +164,61 @@ def __init__(self, style=None, medium=None, **kw): if medium: self.Medium = medium self.loaded = None - + StorageMediumAdapter = StorageMediumAdapter Medium = StorageMediumAdapter StorageMediaNotFound = StorageMediaNotFound LoadQueue = LoadQueue - + def attach_storage_medium(self, ds): """attach a :class:`StorageMediumAdapter` to DataSet""" raise NotImplementedError - + def begin(self, unloading=False): """begin loading""" if not unloading: self.loaded = self.LoadQueue() - + def commit(self): """commit load transaction""" raise NotImplementedError - + def load(self, data): """load data""" def loader(): for ds in data: self.load_dataset(ds) self.wrap_in_transaction(loader, unloading=False) - + def load_dataset(self, ds, level=1): """load this dataset and all its dependent datasets. - - level is essentially the order of processing (going from dataset to - dependent datasets). Child datasets are always loaded before the - parent. The level is important for visualizing the chain of - dependencies : 0 is the bottom, and thus should be the first set of + + level is essentially the order of processing (going from dataset to + dependent datasets). Child datasets are always loaded before the + parent. The level is important for visualizing the chain of + dependencies : 0 is the bottom, and thus should be the first set of objects unloaded - + """ is_parent = level==1 - + levsep = is_parent and "/--------" or "|__.." treelog.info( - "%s%s%s (%s)", level * ' ', levsep, ds.__class__.__name__, + "%s%s%s (%s)", level * ' ', levsep, ds.__class__.__name__, (is_parent and "parent" or level)) - + for ref_ds in ds.meta.references: r = ref_ds.shared_instance(default_refclass=self.dataclass) new_level = level+1 self.load_dataset(r, level=new_level) - + self.attach_storage_medium(ds) - + if ds in self.loaded: # keep track of its order but don't actually load it... self.loaded.referenced(ds, level) return - + log.info("LOADING rows in %s", ds) ds.meta.storage_medium.visit_loader(self) registered = False @@ -239,12 +237,12 @@ def column_vals(): if not registered: self.loaded.register(ds, level) registered = True - - except Exception, e: - etype, val, tb = sys.exc_info() - raise LoadError(etype, val, ds, key=key, row=row), None, tb - - def resolve_row_references(self, current_dataset, row): + + except Exception as e: + etype, val, _ = sys.exc_info() + raise LoadError(etype, val, ds, key=key, row=row) + + def resolve_row_references(self, current_dataset, row): """resolve this DataRow object's referenced values. """ def resolved_rowlike(rowlike): @@ -253,18 +251,18 @@ def resolved_rowlike(rowlike): return DeferredStoredObject(rowlike._dataset, key) loaded_ds = self.loaded[rowlike._dataset] return loaded_ds.meta._stored_objects.get_object(key) - def resolve_stored_object(candidate): + def resolve_stored_object(candidate): if is_rowlike(candidate): return resolved_rowlike(candidate) else: - # then it is the stored object itself. this would happen if - # there is a reciprocal foreign key (i.e. 
organization has a + # then it is the stored object itself. this would happen if + # there is a reciprocal foreign key (i.e. organization has a # parent organization) return candidate - + for name in row.columns(): val = getattr(row, name) - if type(val) in (types.ListType, types.TupleType): + if type(val) in (list, tuple): # i.e. categories = [python, ruby] setattr(row, name, map(resolve_stored_object, val)) elif is_rowlike(val): @@ -273,18 +271,18 @@ def resolve_stored_object(candidate): elif isinstance(val, Ref.Value): # i.e. category_id = python.id. ref = val.ref - # now the ref will return the attribute from a stored object + # now the ref will return the attribute from a stored object # when __get__ is invoked ref.dataset_obj = self.loaded[ref.dataset_class] - + def rollback(self): """rollback load transaction""" raise NotImplementedError - + def then_finally(self, unloading=False): """called in a finally block after load transaction has begun""" pass - + def unload(self): """unload data""" if self.loaded is None: @@ -297,11 +295,11 @@ def unloader(): self.loaded.clear() dataset_registry.clear() self.wrap_in_transaction(unloader, unloading=True) - + def unload_dataset(self, dataset): """unload data stored for this dataset""" dataset.meta.storage_medium.clearall() - + def wrap_in_transaction(self, routine, unloading=False): """call routine in a load transaction""" self.begin(unloading=unloading) @@ -318,43 +316,43 @@ def wrap_in_transaction(self, routine, unloading=False): class EnvLoadableFixture(LoadableFixture): """An abstract fixture that can resolve DataSet objects from an env. - + Keyword "env" should be a dict or a module if not None. According to the style rules, the env will be used to find objects by name. - + """ def __init__(self, env=None, **kw): LoadableFixture.__init__(self, **kw) self.env = env - + def attach_storage_medium(self, ds): """Lookup a storage medium in the ``env`` and attach it to a DataSet. - - A storage medium is looked up by name. If a specific name has not been declared in the DataSet - then it will be guessed using the :meth:`Style.guess_storable_name ` method. - - Once a name is found (typically the name of a DataSet class, say, EmployeeData) then it is looked up + + A storage medium is looked up by name. If a specific name has not been declared in the DataSet + then it will be guessed using the :meth:`Style.guess_storable_name ` method. + + Once a name is found (typically the name of a DataSet class, say, EmployeeData) then it is looked up in the ``env`` which is expected to be a dict or module like object. - + The method first tries ``env.get('EmployeeData')`` then ``getattr(env, 'EmployeeData')``. - + The return value is the storage medium (i.e. a data mapper for the Employees table) - - Note that a :mod:`style ` might translate a name to maintain a consistent + + Note that a :mod:`style ` might translate a name to maintain a consistent naming scheme between DataSet classes and data mappers. - + """ if ds.meta.storage_medium is not None: # already attached... 
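            # (datasets can be visited repeatedly through reference chains,
            # so attaching must be a no-op the second time around)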
return - + storable = ds.meta.storable - + if not storable: if not ds.meta.storable_name: ds.meta.storable_name = self.style.guess_storable_name( ds.__class__.__name__) - + if hasattr(self.env, 'get'): storable = self.env.get(ds.meta.storable_name, None) if not storable: @@ -363,24 +361,24 @@ def attach_storage_medium(self, ds): storable = getattr(self.env, ds.meta.storable_name) except AttributeError: pass - + if not storable: repr_env = repr(type(self.env)) if hasattr(self.env, '__module__'): repr_env = "%s from '%s'" % (repr_env, self.env.__module__) - + raise self.StorageMediaNotFound( "could not find %s '%s' for " "dataset %s in self.env (%s)" % ( self.Medium, ds.meta.storable_name, ds, repr_env)) - + if storable == ds.__class__: raise ValueError( "cannot use %s %s as a storable object of itself! " "(perhaps your style object was not configured right?)" % ( ds.__class__.__name__, ds.__class__)) ds.meta.storage_medium = self.Medium(storable, ds) - + def resolve_stored_object(self, column_val): if type(column_val)==DeferredStoredObject: return column_val.get_stored_object_from_loader(self) @@ -390,44 +388,44 @@ def resolve_stored_object(self, column_val): class DBLoadableFixture(EnvLoadableFixture): """ An abstract fixture that can load a DataSet into a database like thing. - - More specifically, one that forces its implementation to run atomically + + More specifically, one that forces its implementation to run atomically (within a begin / commit / rollback block). """ def __init__(self, dsn=None, **kw): EnvLoadableFixture.__init__(self, **kw) self.dsn = dsn self.transaction = None - + def begin(self, unloading=False): """begin loading data""" EnvLoadableFixture.begin(self, unloading=unloading) self.transaction = self.create_transaction() - + def commit(self): """call transaction.commit() on transaction returned by :meth:`DBLoadableFixture.create_transaction`""" self.transaction.commit() - + def create_transaction(self): """must return a transaction object that implements commit() and rollback() - + .. note:: transaction.begin() will not be called. If that is necessary then call begin before returning the object. - + """ raise NotImplementedError - + def rollback(self): """call transaction.rollback() on transaction returned by :meth:`DBLoadableFixture.create_transaction`""" self.transaction.rollback() class DeferredStoredObject(object): """A stored representation of a row in a DataSet, deferred. - - The actual stored object can only be resolved by the StoredMediumAdapter + + The actual stored object can only be resolved by the StoredMediumAdapter itself - + Imagine...:: - + >>> from fixture import DataSet >>> class PersonData(DataSet): ... class adam: @@ -437,17 +435,17 @@ class DeferredStoredObject(object): ... class jenny: ... pass ... jenny.father = adam - ... - - This would be a way to indicate that jenny's father is adam. This class - will encapsulate that reference so it can be resolved as close to when it + ... + + This would be a way to indicate that jenny's father is adam. This class + will encapsulate that reference so it can be resolved as close to when it was created as possible. 
- + """ def __init__(self, dataset, key): self.dataset = dataset self.key = key - + def get_stored_object_from_loader(self, loader): loaded_ds = loader.loaded[self.dataset] return loaded_ds.meta._stored_objects.get_object(self.key) @@ -455,4 +453,4 @@ def get_stored_object_from_loader(self, loader): if __name__ == '__main__': import doctest doctest.testmod() - + diff --git a/fixture/loadable/sqlalchemy_loadable.py b/fixture/loadable/sqlalchemy_loadable.py index 934a9b0..1379740 100644 --- a/fixture/loadable/sqlalchemy_loadable.py +++ b/fixture/loadable/sqlalchemy_loadable.py @@ -23,7 +23,7 @@ import sqlalchemy sa_major = float(sqlalchemy.__version__[:3]) # i.e. 0.4 or 0.5 if sa_major < 0.5: - Session = scoped_session(sessionmaker(autoflush=False, transactional=True), scopefunc=lambda:__name__) + Session = scoped_session(sessionmaker(autoflush=False), scopefunc=lambda:__name__) else: Session = scoped_session(sessionmaker(autoflush=False, autocommit=False), scopefunc=lambda:__name__) diff --git a/fixture/test/test_command/__init__.py b/fixture/test/test_command/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/fixture/test/test_loadable/__init__.py b/fixture/test/test_loadable/__init__.py deleted file mode 100644 index 5b2b4cb..0000000 --- a/fixture/test/test_loadable/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ - -from test_loadable import * \ No newline at end of file diff --git a/fixture/util.py b/fixture/util.py index 4001c06..965ae23 100644 --- a/fixture/util.py +++ b/fixture/util.py @@ -11,23 +11,23 @@ class DataTestCase(object): """ A mixin to use with unittest.TestCase. - - Upon setUp() the TestCase will load the DataSet classes using your Fixture, - specified in class variables. At tearDown(), all loaded data will be - removed. During your test, you will have ``self.data``, a SuperSet instance + + Upon setUp() the TestCase will load the DataSet classes using your Fixture, + specified in class variables. At tearDown(), all loaded data will be + removed. During your test, you will have ``self.data``, a SuperSet instance to reference loaded data - + Class Attributes: - + ``fixture`` the :class:`Fixture ` instance to load :class:`DataSet ` classes with - + ``datasets`` A list of :class:`DataSet ` classes to load - + ``data`` ``self.data``, a :class:`Fixture.Data ` instance populated for you after ``setUp()`` - + """ fixture = None data = None @@ -39,37 +39,36 @@ def setUp(self): raise ValueError("there are no datasets to load") self.data = self.fixture.data(*self.datasets) self.data.setup() - + def tearDown(self): self.data.teardown() class ObjRegistry: """registers objects by class. - + all lookup methods expect to get either an instance or a class type. 
""" def __init__(self): self.registry = {} - + def __repr__(self): return repr(self.registry) - + def __getitem__(self, obj): try: return self.registry[self.id(obj)] except KeyError: - etype, val, tb = sys.exc_info() - raise KeyError("object %s is not in registry" % obj), None, tb - + raise KeyError("object %s is not in registry" % obj) + def __contains__(self, object): return self.has(object) - + def clear(self): self.registry = {} - + def has(self, object): return self.id(object) in self.registry - + def id(self, object): if hasattr(object, '__class__'): if issubclass(object.__class__, type): @@ -86,7 +85,7 @@ def id(self, object): "cannot identify object %s because it isn't an " "instance or a class" % object) return id(cls) - + def register(self, object): id = self.id(object) self.registry[id] = object @@ -94,14 +93,14 @@ def register(self, object): def with_debug(*channels, **kw): """ - A `nose`_ decorator calls :func:`start_debug` / :func:`start_debug` before and after the + A `nose`_ decorator calls :func:`start_debug` / :func:`start_debug` before and after the decorated method. - - All positional arguments are considered channels that should be debugged. + + All positional arguments are considered channels that should be debugged. Keyword arguments are passed to :func:`start_debug` - + .. _nose: http://somethingaboutorange.com/mrl/projects/nose/ - + """ from nose.tools import with_setup def setup(): @@ -117,13 +116,13 @@ def reset_log_level(level=logging.CRITICAL, channels=( "fixture.loadable.tree")): """ Resets the level on all fixture logs. - - You may need to call this when other applications + + You may need to call this when other applications reset the root logger's log level. - + Calling this with no args sets all logs to logging.CRITICAL which should keep them quiet - + Added in version 1.1 """ for ch in channels: @@ -132,46 +131,46 @@ def reset_log_level(level=logging.CRITICAL, channels=( def start_debug(channel, stream=sys.stdout, handler=None, level=logging.DEBUG): """ A shortcut to start logging a channel to a stream. - + For example:: - + >>> from fixture.util import start_debug, stop_debug >>> start_debug("fixture.loadable") - - starts logging messages from the fixture.loadable channel to the stream. + + starts logging messages from the fixture.loadable channel to the stream. Then... :: - + >>> stop_debug("fixture.loadable") - + ...turns it off. - + Available Channels: - + ``fixture.loadable`` logs LOAD and CLEAR messages, referring to dataset actions - + ``fixture.loadable.tree`` logs a tree view of datasets loaded by datasets (recursion) - - + + Keyword Arguments: - + ``stream`` stream to create a loggin.StreamHandler with. defaults to stdout. - + ``handler`` a preconfigured handler to add to the log - + ``level`` a logging level to set, default is logging.DEBUG - - - .. note:: - Other applications might add a handler to the root logger, - in which case you can't turn off debug output without messing + + + .. note:: + Other applications might add a handler to the root logger, + in which case you can't turn off debug output without messing with the root logger. 
- - + + """ log = logging.getLogger(channel) if not handler: @@ -192,7 +191,7 @@ def flush(self, *a, **kw): pass def _mklog(channel, default_level=logging.CRITICAL, default_stream=None): """ - returns a log object that does nothing until something + returns a log object that does nothing until something calls start_debug() """ log = logging.getLogger(channel) @@ -212,4 +211,4 @@ def any(iterable): return True return False - \ No newline at end of file + diff --git a/fixture/version.py b/fixture/version.py new file mode 100644 index 0000000..fca155b --- /dev/null +++ b/fixture/version.py @@ -0,0 +1,11 @@ +""" +Version information for Fixture + +This file is imported by ``fixture.__init__``, and parsed by +``setup.py`` as well as ``docs/conf.py``. +""" + +# Do not change the format of this next line. Doing so risks breaking +# setup.py and docs/conf.py + +__version__ = "1.5" diff --git a/setup.py b/setup.py index 47502bb..3b70759 100644 --- a/setup.py +++ b/setup.py @@ -1,81 +1,74 @@ - -import sys, os +import ast import ez_setup ez_setup.use_setuptools() -from setuptools import setup, find_packages -import compiler +import os +import re import pydoc -from compiler import visitor +import sys + +from setuptools import setup, find_packages -class ModuleVisitor(object): - def __init__(self): - self.mod_doc = None - self.mod_version = None - - def default(self, node): - for child in node.getChildNodes(): - self.visit(child) - - def visitModule(self, node): - self.mod_doc = node.doc - self.default(node) - - def visitAssign(self, node): - if self.mod_version: - return - asn = node.nodes[0] - assert asn.name == '__version__', ( - "expected __version__ node: %s" % asn) - self.mod_version = node.expr.value - self.default(node) - -def get_module_meta(modfile): - ast = compiler.parseFile(modfile) - modnode = ModuleVisitor() - visitor.walk(ast, modnode) - if modnode.mod_doc is None: +# Get the version string. Cannot be done with import! 
+with open(os.path.join('fixture', 'version.py'), 'rt') as f: + version = re.search( + '__version__\s*=\s*"(?P.*)"\n', + f.read() + ).group('version') + +def get_module_meta(modfile): + with open(modfile) as f: + doc = ast.get_docstring(ast.parse(f.read())) + if doc is None: raise RuntimeError( "could not parse doc string from %s" % modfile) - if modnode.mod_version is None: - raise RuntimeError( - "could not parse __version__ from %s" % modfile) - return (modnode.mod_version,) + pydoc.splitdoc(modnode.mod_doc) + return pydoc.splitdoc(doc) -version, description, long_description = get_module_meta("./fixture/__init__.py") +description, long_description = get_module_meta( + os.path.join('fixture', '__init__.py')) setup( - name = 'fixture', - version = version, - author = 'Kumar McMillan', - author_email = 'kumar dot mcmillan / gmail.com', - description = description, - classifiers = [ 'Environment :: Other Environment', - 'Intended Audience :: Developers', - ( 'License :: OSI Approved :: GNU Library or Lesser ' - 'General Public License (LGPL)'), - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Topic :: Software Development :: Testing', - 'Topic :: Software Development :: Quality Assurance', - 'Topic :: Utilities'], - long_description = long_description, - license = 'GNU Lesser General Public License (LGPL)', - keywords = ('test testing tools unittest fixtures setup teardown ' - 'database stubs IO tempfile'), - url = 'http://farmdev.com/projects/fixture/', - - packages = find_packages(), - - test_suite="fixture.setup_test_not_supported", - entry_points = { - 'console_scripts': [ 'fixture = fixture.command.generate:main' ] - }, + name='fixture', + version=version, + author='Kumar McMillan', + author_email='kumar dot mcmillan / gmail.com', + description=description, + classifiers=[ 'Environment :: Other Environment', + 'Intended Audience :: Developers', + ('License :: OSI Approved :: GNU Library or Lesser ' + 'General Public License (LGPL)'), + 'Natural Language :: English', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Topic :: Software Development :: Testing', + 'Topic :: Software Development :: Quality Assurance', + 'Topic :: Utilities'], + long_description=long_description, + license='GNU Lesser General Public License (LGPL)', + keywords=('test testing tools unittest fixtures setup teardown ' + 'database stubs IO tempfile'), + url='http://farmdev.com/projects/fixture/', + + packages=find_packages(), + install_requires=['six'], + entry_points={ + 'console_scripts': [ + 'fixture = fixture.command.generate:main', + 'manage=fixture.examples.django_example.manage:main', + ], + }, # the following allows e.g. easy_install fixture[django] - extras_require = { + extras_require={ 'decorators': ['nose>=0.9.2'], 'sqlalchemy': ['SQLAlchemy>=0.4'], 'sqlobject': ['SQLObject==0.8'], 'django': ['django'], - }, - ) \ No newline at end of file + }, + test_suite='nose.collector', + tests_require=[ + 'nose', + 'coverage', + 'sqlalchemy', + 'sqlobject', + 'django', + ], +) diff --git a/setup_test_buildout.py b/setup_test_buildout.py deleted file mode 100644 index ad6fdc6..0000000 --- a/setup_test_buildout.py +++ /dev/null @@ -1,62 +0,0 @@ -############################################################################## -# -# Copyright (c) 2006 Zope Corporation and Contributors. -# All Rights Reserved. -# -# This software is subject to the provisions of the Zope Public License, -# Version 2.1 (ZPL). 
A copy of the ZPL should accompany this distribution. -# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED -# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS -# FOR A PARTICULAR PURPOSE. -# -############################################################################## -"""Bootstrap a buildout-based project - -Simply run this script in a directory containing a buildout.cfg. -The script accepts buildout command-line options, so you can -use the -c option to specify an alternate configuration file. - -$Id: bootstrap.py 85041 2008-03-31 15:57:30Z andreasjung $ -""" - -import os, shutil, sys, tempfile, urllib2 - -tmpeggs = tempfile.mkdtemp() - -try: - import pkg_resources -except ImportError: - ez = {} - exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py' - ).read() in ez - ez['use_setuptools'](to_dir=tmpeggs, download_delay=0) - - import pkg_resources - -if sys.platform == 'win32': - def quote(c): - if ' ' in c: - return '"%s"' % c # work around spawn lamosity on windows - else: - return c -else: - def quote (c): - return c - -cmd = 'from setuptools.command.easy_install import main; main()' -ws = pkg_resources.working_set -assert os.spawnle( - os.P_WAIT, sys.executable, quote (sys.executable), - '-c', quote (cmd), '-mqNxd', quote (tmpeggs), 'zc.buildout', - dict(os.environ, - PYTHONPATH= - ws.find(pkg_resources.Requirement.parse('setuptools')).location - ), - ) == 0 - -ws.add_entry(tmpeggs) -ws.require('zc.buildout') -import zc.buildout.buildout -zc.buildout.buildout.main(sys.argv[1:] + ['bootstrap']) -shutil.rmtree(tmpeggs) diff --git a/fixture/test/__init__.py b/tests/__init__.py similarity index 76% rename from fixture/test/__init__.py rename to tests/__init__.py index 8548bb2..0dc251e 100644 --- a/fixture/test/__init__.py +++ b/tests/__init__.py @@ -4,25 +4,17 @@ There are several things to build before you can run the tests. Hopefully this will be simplified in the future but for now, do this: -Create the buildout:: +Install everything :: - $ python2.5 setup_test_buildout.py - -Check out the trunk of Django (until 1.1 is released) into a src dir for buildout :: - - $ svn co http://code.djangoproject.com/svn/django/trunk/ src/django - -Build everything :: - - $ ./bin/buildout + $ pip install .[django] Run syncdb on the Django test DB and create a superuser :: - - $ ./bin/manage syncdb + + $ manage syncdb Run the tests :: - - $ ./bin/test-fixture + + $ python setup.py test Environment Variables --------------------- @@ -30,36 +22,38 @@ The test suite is affected by several environment variables: - FIXTURE_TEST_HEAVY_DSN - - - a database connection that can support operations like foreign key relations + + - a database connection that can support operations like foreign key relations (sqlite won't through foreign key errors) - defaults to None. - - typically this would be a postgres connection where temp tables can be + - typically this would be a postgres connection where temp tables can be created and destroyed - - a special DSN, "sqlite:///:tmp:", will create a connection to a temporary - file-based sqlite db. This is necessary because :memory: dbs can't be + - a special DSN, "sqlite:///:tmp:", will create a connection to a temporary + file-based sqlite db. 
This is necessary because :memory: dbs can't be shared easily using sqlalchemy (connections are not pooled) - FIXTURE_TEST_LITE_DSN - + - a database as lite as possible, for speed - defaults to sqlite:///:memory: - + As a shortcut, you can run this to set these variables in your shell :: - - $ source fixture/test/profile/full.sh + + $ source tests/profile/full.sh """ -import unittest, nose, os -from fixture.test import conf +import nose +import os +import unittest +from . import conf def setup(): # super hack: if conf.HEAVY_DSN == 'sqlite:///:tmp:': conf.HEAVY_DSN_IS_TEMPIO = True conf.reset_heavy_dsn() - + # this is here because the doc generator also runs doctests. # should fix that to use proper _test() methods for a module teardown_examples() @@ -72,14 +66,14 @@ def teardown_examples(): os.unlink('/tmp/fixture_example.db') if os.path.exists('/tmp/fixture_generate.db'): os.unlink('/tmp/fixture_generate.db') - + class PrudentTestResult(unittest.TestResult): """A test result that raises an exception immediately""" def _raise_err(self, err): - exctype, value, tb = err - raise Exception("%s: %s" % (exctype, value)), None, tb - + exctype, value, _ = err + raise Exception("%s: %s" % (exctype, value)) + def addFailure(self, test, err): self._raise_err(err) def addError(self, test, err): @@ -90,26 +84,23 @@ def printErrors(self): pass def printErrorList(self, flavour, errors): pass - + class SilentTestRunner(unittest.TextTestRunner): - """a test runner that doesn't print output but raises + """a test runner that doesn't print output but raises exceptions immediately """ - def _makeResult(self): + def _makeResult(self): return _SilentTestResult() - + def run(self, test): "Run the given test case or test suite." result = self._makeResult() test(result) return result - + def attr(**kwargs): """Add attributes to a test function/method/class""" def wrap(func): func.__dict__.update(kwargs) return func return wrap - - - \ No newline at end of file diff --git a/fixture/test/conf.py b/tests/conf.py similarity index 100% rename from fixture/test/conf.py rename to tests/conf.py diff --git a/fixture/test/env_supports.py b/tests/env_supports.py similarity index 100% rename from fixture/test/env_supports.py rename to tests/env_supports.py diff --git a/fixture/test/test_command/test_generate/__init__.py b/tests/generate_helper.py similarity index 89% rename from fixture/test/test_command/test_generate/__init__.py rename to tests/generate_helper.py index e37ca37..bc44ba3 100644 --- a/fixture/test/test_command/test_generate/__init__.py +++ b/tests/generate_helper.py @@ -1,11 +1,17 @@ +from __future__ import print_function, absolute_import + import sys import os + from nose.tools import eq_ from nose.exc import SkipTest -from fixture.test import conf + from fixture.command.generate import DataSetGenerator, dataset_generator +from . import conf + + def setup(): # every tests needs a real db conn : if not conf.HEAVY_DSN: @@ -13,7 +19,7 @@ def setup(): def compile_(code): """compiles code string for a module. - + returns dict w/ attributes of that module. """ mod = {} @@ -21,40 +27,40 @@ def compile_(code): return mod class GenerateTest(object): - """tests that a fixture code generator can run with the specified arguments + """tests that a fixture code generator can run with the specified arguments and produce a loadable fixture. 
- - the details of which arguments, how that fixture loads data, and how the - data load is proven is defined in the concrete implementation of this test + + the details of which arguments, how that fixture loads data, and how the + data load is proven is defined in the concrete implementation of this test class - + """ args = [] - + def __init__(self, *a, **kw): super(GenerateTest, self).__init__(*a, **kw) self.env = None - + def assert_env_is_clean(self): raise NotImplementedError - + def assert_env_generated_ok(self, env): raise NotImplementedError - + def assert_data_loaded(self, data): raise NotImplementedError - + def create_fixture(self): raise NotImplementedError("must return a concrete LoadableFixture instance, i.e. SQLAlchemyFixture") - + def load_env(self, module): raise NotImplementedError - + def dataset_generator(self, extra_args=[]): args = [a for a in self.args] if extra_args: args.extend(extra_args) - + self.assert_env_is_clean() code = dataset_generator(args) try: @@ -63,12 +69,12 @@ def dataset_generator(self, extra_args=[]): data = self.load_env(self.env) self.assert_data_loaded(data) except: - print code + print(code) raise - - def test_query(self): + + def test_query(self): self.dataset_generator(['-w', "name = 'super cash back!'"]) - + def test_query_no_data(self): _stderr = sys.stderr sys.stderr = sys.stdout @@ -78,37 +84,37 @@ def wrong_exc(exc=None): try: try: self.dataset_generator(['-w', "name = 'fooobzarius'"]) - except SystemExit, e: + except SystemExit as e: eq_(e.code, 2) - except Exception, e: + except Exception as e: wrong_exc(e) else: wrong_exc() finally: sys.stderr = _stderr - + class UsingTesttoolsTemplate(object): def __init__(self, *a,**kw): super(UsingTesttoolsTemplate, self).__init__(*a,**kw) self.args = [a for a in self.args] + ["--template=testtools"] - + def load_datasets(self, module, datasets): from testtools.fixtures import affix fxt = affix(*[d() for d in datasets]) - return fxt - + return fxt + class UsingFixtureTemplate(object): def __init__(self, *a,**kw): super(UsingFixtureTemplate, self).__init__(*a,**kw) self.args = [a for a in self.args] + ["--template=fixture"] - + def visit_loader(self, loader): pass - + def load_datasets(self, module, datasets): fixture = self.create_fixture() self.visit_loader(fixture.loader) d = fixture.data(*datasets) d.setup() - return d \ No newline at end of file + return d diff --git a/fixture/test/profile/full.sh b/tests/profile/full.sh similarity index 100% rename from fixture/test/profile/full.sh rename to tests/profile/full.sh diff --git a/fixture/test/profile/quick.sh b/tests/profile/quick.sh similarity index 100% rename from fixture/test/profile/quick.sh rename to tests/profile/quick.sh diff --git a/fixture/test/test_base.py b/tests/test_base.py similarity index 95% rename from fixture/test/test_base.py rename to tests/test_base.py index a1ad542..5cddb3d 100644 --- a/fixture/test/test_base.py +++ b/tests/test_base.py @@ -1,22 +1,27 @@ -from cStringIO import StringIO +from __future__ import absolute_import + +import nose.case +import nose.loader +import nose.tools import sys -import nose.tools, nose.case, nose.loader -from nose.tools import eq_, raises -from fixture.test import attr, SilentTestRunner + +from . 
import attr, SilentTestRunner from fixture.base import Fixture +from nose.tools import eq_, raises +from six import StringIO mock_call_log = [] def reset_mock_call_log(): mock_call_log[:] = [] - + class MockLoader(object): def load(self, data): mock_call_log.append((self.__class__, 'load', data.__class__)) def unload(self): mock_call_log.append((self.__class__, 'unload')) - + class AbusiveMockLoader(object): def load(self, data): mock_call_log.append((self.__class__, 'load', data.__class__)) @@ -33,15 +38,15 @@ def shared_instance(self, *a, **kw): return self() class StubDataset1(StubDataset): pass class StubDataset2(StubDataset): pass - + class TestFixture: def setUp(self): reset_mock_call_log() self.fxt = Fixture(loader=MockLoader(), dataclass=StubSuperSet) - + def tearDown(self): reset_mock_call_log() - + @attr(unit=True) def test_data_sets_up_and_tears_down_data(self): data = self.fxt.data(StubDataset1, StubDataset2) @@ -49,7 +54,7 @@ def test_data_sets_up_and_tears_down_data(self): eq_(mock_call_log[-1], (MockLoader, 'load', StubSuperSet)) data.teardown() eq_(mock_call_log[-1], (MockLoader, 'unload')) - + @attr(unit=True) def test_data_implements_with_statement(self): data = self.fxt.data(StubDataset1, StubDataset2) @@ -57,7 +62,7 @@ def test_data_implements_with_statement(self): eq_(mock_call_log[-1], (MockLoader, 'load', StubSuperSet)) data.__exit__(None, None, None) eq_(mock_call_log[-1], (MockLoader, 'unload')) - + @attr(unit=True) def test_with_data_decorates_a_callable(self): @self.fxt.with_data(StubDataset1, StubDataset2) @@ -67,7 +72,7 @@ def some_callable(data): eq_(mock_call_log[0], (MockLoader, 'load', StubSuperSet)) eq_(mock_call_log[1], ('some_callable', Fixture.Data)) eq_(mock_call_log[2], (MockLoader, 'unload')) - + @attr(unit=True) def test_with_data_calls_teardown_on_error(self): @self.fxt.with_data(StubDataset1, StubDataset2) @@ -76,7 +81,7 @@ def some_callable(data): raises(RuntimeError)(some_callable)() eq_(mock_call_log[0], (MockLoader, 'load', StubSuperSet)) eq_(mock_call_log[1], (MockLoader, 'unload')) - + @attr(unit=True) def test_with_data_aborts_teardown_on_interrupt(self): @self.fxt.with_data(StubDataset1, StubDataset2) @@ -84,9 +89,9 @@ def some_callable(data): raise KeyboardInterrupt raises(KeyboardInterrupt)(some_callable)() eq_(mock_call_log[0], (MockLoader, 'load', StubSuperSet)) - eq_(len(mock_call_log), 1, + eq_(len(mock_call_log), 1, "unexpected additional calls were made: %s" % mock_call_log) - + @attr(unit=True) def test_with_data_raises_exception_in_teardown(self): self.fxt.loader = AbusiveMockLoader() @@ -95,7 +100,7 @@ def some_callable(data): pass raises(ValueError)(some_callable)() eq_(mock_call_log[0], (AbusiveMockLoader, 'load', StubSuperSet)) - + @attr(unit=True) def test_with_data_does_soft_teardown_on_exception(self): self.fxt.loader = AbusiveMockLoader() @@ -113,7 +118,7 @@ def some_callable(data): saved_err, ( "unexpected stderr capture: \n<<<<<<\n%s>>>>>>\n" % saved_err) eq_(mock_call_log[0], (AbusiveMockLoader, 'load', StubSuperSet)) - + @attr(unit=True) def test_with_data_decorates_a_generator(self): @self.fxt.with_data(StubDataset1, StubDataset2) @@ -122,12 +127,12 @@ def generated_test(data, step): mock_call_log.append(('some_generator', data.__class__, step)) for step in range(4): yield generated_test, step - + loader = nose.loader.TestLoader() # 0.10 only .... 
suite = loader.loadTestsFromGenerator(some_generator, None) SilentTestRunner().run(suite) - + eq_(mock_call_log[0], (MockLoader, 'load', StubSuperSet)) eq_(mock_call_log[1], ('some_generator', Fixture.Data, 0)) eq_(mock_call_log[2], (MockLoader, 'unload')) @@ -139,7 +144,7 @@ def generated_test(data, step): eq_(mock_call_log[8], (MockLoader, 'unload')) eq_(mock_call_log[9], (MockLoader, 'load', StubSuperSet)) eq_(mock_call_log[10], ('some_generator', Fixture.Data, 3)) - + @attr(unit=True) def test_generated_tests_call_teardown_on_error(self): @self.fxt.with_data(StubDataset1, StubDataset2) @@ -150,19 +155,19 @@ def generated_test(data, step): raise RuntimeError for step in range(2): yield generated_test, step - + loader = nose.loader.TestLoader() # 0.10 only .... suite = loader.loadTestsFromGenerator(some_generator, None) SilentTestRunner().run(suite) - + eq_(mock_call_log[0], (MockLoader, 'load', StubSuperSet)) eq_(mock_call_log[1], ('some_generator', Fixture.Data, 0)) eq_(mock_call_log[2], (MockLoader, 'unload')) eq_(mock_call_log[3], (MockLoader, 'load', StubSuperSet)) eq_(mock_call_log[4], ('some_generator', Fixture.Data, 1)) eq_(mock_call_log[5], (MockLoader, 'unload')) - + @attr(unit=True) def test_generated_raises_exception_in_teardown(self): self.fxt.loader = AbusiveMockLoader() @@ -172,7 +177,7 @@ def generated_test(data, step): mock_call_log.append(('some_generator', data.__class__, step)) for step in range(2): yield generated_test, step - + loader = nose.loader.TestLoader() # 0.10 only .... suite = loader.loadTestsFromGenerator(some_generator, None) @@ -180,10 +185,10 @@ def generated_test(data, step): def run_tests(): SilentTestRunner().run(suite) run_tests() - + eq_(mock_call_log[0], (AbusiveMockLoader, 'load', StubSuperSet)) eq_(mock_call_log[1], ('some_generator', Fixture.Data, 0)) - + @attr(unit=True) def test_generated_error_raises_soft_exception_in_teardown(self): self.fxt.loader = AbusiveMockLoader() @@ -194,7 +199,7 @@ def generated_test(data, step): mock_call_log.append(('some_generator', data.__class__, step)) raise RuntimeError("error raised from some_generator") for step in range(2): yield generated_test, step - + loader = nose.loader.TestLoader() # 0.10 only .... 
@@ -203,13 +208,13 @@ def generated_test(data, step): @raises(RuntimeError) def run_tests(): SilentTestRunner().run(suite) - + err = StringIO() sys.stderr = err try: try: run_tests() - except Exception, e: + except Exception as e: etype, val, tb = sys.exc_info() assert 'error raised from some_generator' in str(val), ( "Unexpected: %s" % val) @@ -221,10 +226,10 @@ def run_tests(): assert "ValueError: An exception during teardown" in \ saved_err, ( "unexpected stderr capture: \n<<<<<<\n%s>>>>>>\n" % saved_err) - + eq_(mock_call_log[0], (AbusiveMockLoader, 'load', StubSuperSet)) eq_(mock_call_log[1], ('some_generator', Fixture.Data, 0)) - + @attr(unit=True) def test_with_data_preserves_a_decorated_callable(self): def my_custom_setup(): @@ -243,4 +248,4 @@ def some_callable(data): eq_(mock_call_log[-3], ('some_callable', Fixture.Data)) eq_(mock_call_log[-2], (MockLoader, 'unload')) eq_(mock_call_log[-1], 'my_custom_teardown') - \ No newline at end of file + diff --git a/fixture/test/test_dataset/test_converter.py b/tests/test_converter.py similarity index 91% rename from fixture/test/test_dataset/test_converter.py rename to tests/test_converter.py index 91e5ff3..6fe4e84 100644 --- a/fixture/test/test_dataset/test_converter.py +++ b/tests/test_converter.py @@ -1,16 +1,21 @@ -from fixture.test import attr -from decimal import Decimal import datetime -from fixture import DataSet + +from decimal import Decimal from nose.tools import eq_, raises -from fixture.dataset.converter import * +from six import StringIO + try: import json except ImportError: import simplejson as json -from cStringIO import StringIO - + +from fixture import DataSet +from fixture.dataset.converter import * + +from . import attr + + class FooData(DataSet): class bar: name = "call me bar" @@ -18,26 +23,26 @@ class bar: class foo: name = "name's foo" is_alive = True - + class MuchoData(DataSet): class mucho: d = datetime.date(2008,1,1) dt = datetime.datetime(2008,1,1,2,30,59) - dec = Decimal("1.45667") + dec = Decimal("1.45667") fl = float(1.45667) class DummyError(Exception): pass - + class TestDatasetToJson(object): - + @attr(unit=1) @raises(TypeError) def test_must_be_dataset(self): class NotADataSet(object): pass dataset_to_json(NotADataSet) - + @attr(unit=1) def test_convert_cls(self): eq_(dataset_to_json(FooData), @@ -46,7 +51,7 @@ def test_convert_cls(self): 'is_alive': False}, {'name': "name's foo", 'is_alive': True}])) - + @attr(unit=1) def test_convert_instance(self): foo = FooData() @@ -56,7 +61,7 @@ def test_convert_instance(self): 'is_alive': False}, {'name': "name's foo", 'is_alive': True}])) - + @attr(unit=1) def test_dump_to_file(self): fp = StringIO() @@ -67,7 +72,7 @@ def test_dump_to_file(self): 'is_alive': False}, {'name': "name's foo", 'is_alive': True}])) - + @attr(unit=1) def test_types(self): eq_(json.loads(dataset_to_json(MuchoData)), @@ -77,24 +82,24 @@ def test_types(self): "dec": "1.45667", "fl": 1.45667 }]) - + @attr(unit=1) @raises(DummyError) def test_custom_converter(self): - + def my_default(obj): raise DummyError() - + ds = dataset_to_json(MuchoData, default=my_default) assert not ds, ( "dataset_to_json() should have died but it returned: %s" % ds) - + @attr(unit=1) def test_wrap(self): - + def wrap_in_dict(objects): return {'data': objects} - + eq_(dataset_to_json(FooData, wrap=wrap_in_dict), json.dumps({ 'data': @@ -103,4 +108,4 @@ def wrap_in_dict(objects): {'name': "name's foo", 'is_alive': True}] })) - \ No newline at end of file + diff --git a/fixture/test/test_dataset/test_dataset.py 
b/tests/test_dataset.py similarity index 94% rename from fixture/test/test_dataset/test_dataset.py rename to tests/test_dataset.py index 582a521..18308b4 100644 --- a/fixture/test/test_dataset/test_dataset.py +++ b/tests/test_dataset.py @@ -1,9 +1,14 @@ +from __future__ import absolute_import + from nose.tools import with_setup, eq_, raises + from fixture import DataSet from fixture.dataset import ( Ref, DataType, DataRow, SuperSet, MergedSuperSet, is_rowlike) -from fixture.test import attr + +from . import attr + class Books(DataSet): def data(self): @@ -18,8 +23,8 @@ def data(self): ('peewee', dict(director='Tim Burton')), ('aquatic', dict(director='cant remember his name')), ) - - + + class Authors(DataSet): class martel: name = 'Yann Martel' @@ -32,7 +37,7 @@ class lolita: class pi: title = 'life of pi' author = Authors.martel.ref('name') - + class CategoryData(DataSet): @@ -45,7 +50,7 @@ class free_stuff: class discounted: id = 3 name = 'discounted stuff' - + class ProductData(DataSet): class truck: id = 1 @@ -76,7 +81,7 @@ def assert_row_dict_for_iter(self, items, count): raise NotImplementedError def assert_itered_n_times(count): raise NotImplementedError - + @attr(unit=True) def test_access(self): self.assert_access(self.dataset) @@ -88,24 +93,24 @@ def test_iter_yields_keys_rows(self): count += 1 items = dict([(k, getattr(row, k)) for k in row.columns()]) self.assert_row_dict_for_iter(k, items, count) - + self.assert_itered_n_times(count) - + class TestDataSet(DataSetTest): def setUp(self): self.dataset = Books() - + def assert_access(self, dataset): eq_(dataset.lolita.title, 'lolita') eq_(dataset.pi.title, 'life of pi') eq_(dataset['lolita'].title, 'lolita') eq_(dataset['pi'].title, 'life of pi') - + def assert_itered_n_times(self, count): eq_(count, 2) - - def assert_row_dict_for_iter(self, key, items, count): + + def assert_row_dict_for_iter(self, key, items, count): if count == 1: eq_(items, {'title': 'lolita'}) elif count == 2: @@ -134,12 +139,12 @@ class pi: @attr(unit=True) def test_row_is_decorated_with_ref(self): assert hasattr(self.dataset_class.lolita, 'ref'), ( - "expected %s to be decorated with a ref method" % + "expected %s to be decorated with a ref method" % self.dataset_class.lolita) assert self.dataset_class.lolita.ref.__class__==Ref, ( - "unexpected ref class: %s" % + "unexpected ref class: %s" % self.dataset_class.lolita.ref.__class__) - + @attr(unit=True) def test_row_is_rowlike(self): assert is_rowlike(self.dataset_class.lolita), ( @@ -153,23 +158,23 @@ class some_row: class StubDataSetNewStyle(DataSet): class some_row(object): pass - + eq_(is_rowlike(StubDataSet.some_row), True) eq_(is_rowlike(StubDataSetNewStyle.some_row), True) eq_(is_rowlike(DataRow(StubDataSet)), True) - + class StubRow: pass class StubRowNewStyle(object): pass eq_(is_rowlike(StubRow), False) eq_(is_rowlike(StubRowNewStyle), False) - + eq_(is_rowlike(1), False) eq_(is_rowlike({}), False) eq_(is_rowlike([]), False) -class InheritedRowsTest(DataSetTest): +class InheritedRowsTest(DataSetTest): def assert_access(self, dataset): def assert_all_attr(type): fxt = getattr(dataset, type) @@ -177,31 +182,31 @@ def assert_all_attr(type): eq_(fxt.session, 'aaaaaaa') eq_(fxt.offer, 1) eq_(fxt.time, 'now') - + assert_all_attr('click') assert_all_attr('submit') assert_all_attr('order') assert_all_attr('activation') - + def assert_itered_n_times(self, count): eq_(count, 4) - + def assert_row_dict_for_iter(self, key, items, count): if count == 1: - eq_(items, - {'offer': 1, 'time': 'now', 'session': 
'aaaaaaa', + eq_(items, + {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', 'type': 'activation'}) elif count == 2: - eq_(items, - {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', + eq_(items, + {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', 'type': 'click'}) elif count == 3: - eq_(items, - {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', + eq_(items, + {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', 'type': 'order'}) elif count == 4: - eq_(items, - {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', + eq_(items, + {'offer': 1, 'time': 'now', 'session': 'aaaaaaa', 'type': 'submit'}) else: raise ValueError("unexpected row %s at key %s, count %s" % ( @@ -212,31 +217,31 @@ class click: session = 'aaaaaaa' offer = 1 time = 'now' - + class submit(click): type = 'submit' class order(click): type = 'order' class activation(click): type = 'activation' - + class TestInheritedRows(InheritedRowsTest): dataset = EventData() - + class EventDataNewStyle(DataSet): class click(object): type = 'click' session = 'aaaaaaa' offer = 1 time = 'now' - + class submit(click): type = 'submit' class order(click): type = 'order' class activation(click): type = 'activation' - + class TestInheritedRowsWithNewStyle(InheritedRowsTest): dataset = EventDataNewStyle() @@ -253,37 +258,37 @@ def data(self): ('Lazy-boy', dict(type='Lazy-boy')) ) self.dataset = Chairs() - + def assert_access(self, dataset): eq_(dataset.recliner.type, 'recliner') eq_(dataset['Lazy-boy'].type, 'Lazy-boy') - + # should also have the same defaults as DataSet : eq_(dataset.meta.storable_name, 'PretendStorage') eq_(dataset.meta.row, DataSet.Meta.row) - + def assert_itered_n_times(self, count): eq_(count, 2) - - def assert_row_dict_for_iter(self, key, items, count): + + def assert_row_dict_for_iter(self, key, items, count): if count == 1: eq_(items, {'type': 'recliner'}) elif count == 2: eq_(items, {'type': 'Lazy-boy'}) else: raise ValueError("unexpected row %s, count %s" % (items, count)) - + class SuperSetTest: """tests common behavior of SuperSet like objects.""" SuperSet = None - + def setUp(self): self.superset = self.SuperSet(Books(), Movies()) - + @attr(unit=True) def test_access(self): raise NotImplementedError - + @attr(unit=True) def test_iter_yields_datasets(self): count=0 @@ -299,7 +304,7 @@ def test_iter_yields_datasets(self): class TestSuperSet(SuperSetTest): SuperSet = SuperSet - + @attr(unit=True) def test_access(self): eq_(self.superset.Books.lolita.title, 'lolita') @@ -311,7 +316,7 @@ def test_access(self): class TestMergedSuperSet(SuperSetTest): SuperSet = MergedSuperSet - + @attr(unit=True) def test_access(self): eq_(self.superset.lolita.title, 'lolita') @@ -326,22 +331,22 @@ class ComplexRefTest: def test_construction(self): eq_(self.offer_data.meta.references, [CategoryData, ProductData]) eq_(self.product_data.meta.references, [CategoryData]) - + cat_data = self.product_data.meta.references[0]() eq_(cat_data.meta.references, []) eq_([c.__class__ for c in self.product_data.ref], [CategoryData]) - + class TestComplexRefs(ComplexRefTest): def setUp(self): self.offer_data = OfferData() self.product_data = ProductData() - + class ProductObj(DataSet): class truck: category_id = CategoryData.vehicles class spaceship: category_id = CategoryData.vehicles - + class OfferObj(DataSet): class free_truck: product = ProductData.truck @@ -354,8 +359,8 @@ class TestComplexRefsToObjects(ComplexRefTest): def setUp(self): self.offer_data = OfferObj() self.product_data = ProductObj() - - + + class ProductObjList(DataSet): class truck: categories 
= [CategoryData.vehicles] @@ -369,13 +374,13 @@ class free_truck: class discounted_spaceship: products = [ProductData.spaceship] categories = [CategoryData.discounted] - + class TestComplexRefsToListsOfObjects(ComplexRefTest): def setUp(self): self.offer_data = OfferObjList() self.product_data = ProductObjList() - - + + class ProductObjTuple(DataSet): class truck: categories = tuple([CategoryData.vehicles]) @@ -389,7 +394,7 @@ class free_truck: class discounted_spaceship: products = tuple([ProductData.spaceship]) categories = tuple([CategoryData.discounted]) - + class TestComplexRefsToTuplesOfObjects(ComplexRefTest): def setUp(self): self.offer_data = OfferObjTuple() @@ -404,9 +409,9 @@ class henry: class jenny: name="Jenny" jenny.buddy = henry - + # will also create an infinite loop : ds = Pals() eq_(ds.meta.references, []) - - \ No newline at end of file + + diff --git a/fixture/test/test_command/test_generate/test_generate.py b/tests/test_generate.py similarity index 90% rename from fixture/test/test_command/test_generate/test_generate.py rename to tests/test_generate.py index 56cbcff..f69e06c 100644 --- a/fixture/test/test_command/test_generate/test_generate.py +++ b/tests/test_generate.py @@ -1,9 +1,12 @@ import sys from nose.tools import eq_, raises, with_setup -from fixture.test import attr + from fixture.command.generate import * - + +from . import attr + + class Stranger(object): """something that cannot produce data.""" pass @@ -19,7 +22,7 @@ class MyHandler(DataHandler): def recognizes(obj_path, obj=None): if obj_path == "myhandler.object_path": return True - + def register_myhandler(): register_handler(MyHandler) @@ -29,19 +32,19 @@ def reset_handlers(): @attr(unit=True) @with_setup(setup=register_myhandler, teardown=reset_handlers) -def test_dataset_handler(): +def test_dataset_handler(): g = DataSetGenerator({}) hnd = g.get_handler("myhandler.object_path") assert isinstance(hnd, MyHandler) - - + + @attr(unit=True) @raises(UnrecognizedObject) @with_setup(setup=register_myhandler, teardown=reset_handlers) def test_unrecognized_dataset_handler(): g = DataSetGenerator({}) hnd = g.get_handler("NOTHONG") - + @attr(unit=True) def test_requires_option(): required_idents = [] @@ -53,12 +56,12 @@ def mock_require(ident): sys.stderr = sys.stdout try: try: - dataset_generator([ 'bad.object.path', + dataset_generator([ 'bad.object.path', '--require-egg=foo==1.0', '--require-egg=baz>=2.0b']) - except SystemExit, e: + except SystemExit as e: pass finally: - pkg_resources.require = orig_require + pkg_resources.require = orig_require sys.stderr = sys.__stderr__ eq_(required_idents, ['foo==1.0', 'baz>=2.0b']) @@ -72,11 +75,11 @@ def some_method(self): @attr(unit=1) def test_resolve_path_to_function(): eq_(resolve_function_path("%s:some_function" % __name__), some_function) - + @attr(unit=1) def test_resolve_path_to_method(): eq_(resolve_function_path("%s:SomeClass.some_method" % __name__), SomeClass.some_method) - + @attr(unit=1) def test_resolve_path_to_module(): # Note that this is not realistic. 
I think we'd always want a callable @@ -86,4 +89,4 @@ def test_resolve_path_to_module(): @raises(ImportError) def test_resolve_bad_path(): resolve_function_path("nomoduleshouldbenamedthis.nowhere:Babu") - \ No newline at end of file + diff --git a/fixture/test/test_command/test_generate/test_generate_sqlalchemy.py b/tests/test_generate_sqlalchemy.py similarity index 88% rename from fixture/test/test_command/test_generate/test_generate_sqlalchemy.py rename to tests/test_generate_sqlalchemy.py index 5a27988..7cd3292 100644 --- a/fixture/test/test_command/test_generate/test_generate_sqlalchemy.py +++ b/tests/test_generate_sqlalchemy.py @@ -1,4 +1,4 @@ - +from __future__ import print_function import sys from nose.tools import eq_, raises from nose.exc import SkipTest @@ -10,13 +10,11 @@ from fixture.command.generate.template import Template from fixture.command.generate.generate_sqlalchemy import * -from fixture.test import conf, attr -from fixture.test import env_supports -from fixture.test.test_command.test_generate import ( - GenerateTest, UsingTesttoolsTemplate, UsingFixtureTemplate) +from .generate_helper import GenerateTest, UsingTesttoolsTemplate, UsingFixtureTemplate +from . import conf, attr, env_supports from fixture.examples.db import sqlalchemy_examples from fixture.examples.db.sqlalchemy_examples import ( - metadata, Category, Product, Offer, categories, products, offers ) + metadata, Category, Product, Offer, categories, products, offers ) realmeta = None RealSession = None @@ -36,17 +34,17 @@ class env(object): somedict = { 'barbara': Table('barbara', meta, Column('id', INT, primary_key=True)) } - + e = TableEnv('fixture.examples.db.sqlalchemy_examples', env, somedict) - + tbl = e[products] eq_(tbl['name'], 'products') eq_(tbl['module'], sqlalchemy_examples) - + tbl = e[env.taxi] eq_(tbl['name'], 'taxi') eq_(tbl['module'], sys.modules[__name__]) - + tbl = e[somedict['barbara']] eq_(tbl['name'], 'barbara') # can't get module from dict... 
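Throughout this file the patch drops the old sessionmaker(transactional=...) flag: SQLAlchemy 0.5 replaced it with autocommit, whose meaning is inverted, so transactional=True is simply the modern default and can be removed outright. A minimal sketch of the resulting session setup (the in-memory engine URL is illustrative only):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine('sqlite://')  # throwaway in-memory database
    # old: sessionmaker(autoflush=False, transactional=True)
    # new: autocommit=False is already the default, so the flag is dropped
    Session = scoped_session(sessionmaker(bind=engine, autoflush=False))
    session = Session()   # opens a transaction on first use
    session.rollback()    # nothing is persisted until commit()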
@@ -71,11 +69,11 @@ class options: dsn = conf.LITE_DSN env = ['fixture.examples.db.sqlalchemy_examples'] self.generator = DataSetGenerator(options, template=StubTemplate()) - + def tearDown(self): from sqlalchemy.orm import clear_mappers clear_mappers() - + @attr(unit=True) def test_recognizes_mapped_class(self): from sqlalchemy.orm import mapper @@ -84,19 +82,19 @@ def test_recognizes_mapped_class(self): "%s.MappableObject" % (MappableObject.__module__), obj=MappableObject) eq_(type(hnd), SQLAlchemyMappedClassHandler) - + @attr(unit=True) def test_recognizes_session_mapper(self): from sqlalchemy.orm import mapper, sessionmaker, scoped_session - - ScopedSession = scoped_session(sessionmaker(autoflush=False, transactional=False)) - ScopedSession.mapper(MappableObject, categories) - + + ScopedSession = scoped_session(sessionmaker(autoflush=False)) + mapper(MappableObject, categories) + hnd = self.generator.get_handler( "%s.MappableObject" % (MappableObject.__module__), obj=MappableObject) eq_(type(hnd), SQLAlchemySessionMapperHandler) - + @attr(unit=True) @raises(NotImplementedError) def test_recognizes_table_object(self): @@ -104,7 +102,7 @@ def test_recognizes_table_object(self): "%s.categories" % (sqlalchemy_examples.__name__), obj=categories) eq_(type(hnd), SQLAlchemyTableHandler) - + class HandlerQueryTest(object): class CategoryData(DataSet): @@ -114,7 +112,7 @@ class curvy: name='curvy' class jagged: name='jagged' - + @attr(unit=True, generate=True) def test_find(self): try: @@ -127,7 +125,7 @@ def test_find(self): assert rs, "unexpected record set: %s" % rs obj = [o for o in rs] eq_(obj[0].name, self.data.CategoryData.bumpy.name) - + @attr(unit=True, generate=True) def test_findall(self): try: @@ -139,11 +137,11 @@ def test_findall(self): self.hnd.commit() assert rs, "unexpected record set: %s" % rs names = set([o.name for o in rs]) - print names + print(names) assert self.data.CategoryData.bumpy.name in names assert self.data.CategoryData.curvy.name in names assert self.data.CategoryData.jagged.name in names - + @attr(unit=True, generate=True) def test_findall_accepts_query(self): try: @@ -158,22 +156,22 @@ def test_findall_accepts_query(self): eq_(len(obj), 1) class TestQueryMappedClass(HandlerQueryTest): - + def setUp(self): from fixture import SQLAlchemyFixture, NamedDataStyle import sqlalchemy from sqlalchemy.orm import mapper, relation, clear_mappers from sqlalchemy import create_engine - + metadata.bind = create_engine(conf.LITE_DSN) metadata.create_all() - + class options: dsn = conf.LITE_DSN env = ['fixture.examples.db.sqlalchemy_examples'] self.options = options self.generator = DataSetGenerator(self.options, template=StubTemplate()) - + mapper(Category, categories) mapper(Product, products, properties={ 'category': relation(Category), @@ -182,114 +180,114 @@ class options: 'category': relation(Category, backref='products'), 'product': relation(Product) }) - + self.fixture = SQLAlchemyFixture( - env=sqlalchemy_examples, + env=sqlalchemy_examples, style=NamedDataStyle(), engine=metadata.bind) self.data = self.fixture.data(self.CategoryData) self.data.setup() - + self.hnd = self.generator.get_handler( "%s.Category" % (Category.__module__), obj=Category, connection=metadata.bind) self.hnd.begin() - + def tearDown(self): from sqlalchemy.orm import clear_mappers self.data.teardown() metadata.drop_all() clear_mappers() - + class TestQuerySessionMappedClass(HandlerQueryTest): - + def setUp(self): from fixture import SQLAlchemyFixture, NamedDataStyle import sqlalchemy from 
sqlalchemy.orm import ( mapper, relation, clear_mappers, sessionmaker, scoped_session) from sqlalchemy import create_engine - + metadata.bind = create_engine(conf.LITE_DSN) metadata.create_all() - + class options: dsn = conf.LITE_DSN env = ['fixture.examples.db.sqlalchemy_examples'] self.options = options self.generator = DataSetGenerator(self.options, template=StubTemplate()) - - ScopedSession = scoped_session(sessionmaker(autoflush=False, transactional=True)) - - ScopedSession.mapper(Category, categories, save_on_init=False) - ScopedSession.mapper(Product, products, properties={ + + ScopedSession = scoped_session(sessionmaker(autoflush=False)) + + mapper(Category, categories) + mapper(Product, products, properties={ 'category': relation(Category), - }, save_on_init=False) + }) - ScopedSession.mapper(Offer, offers, properties={ + mapper(Offer, offers, properties={ 'category': relation(Category, backref='products'), 'product': relation(Product) - }, save_on_init=False) + }) - + self.fixture = SQLAlchemyFixture( - env=sqlalchemy_examples, + env=sqlalchemy_examples, style=NamedDataStyle(), engine=metadata.bind) self.data = self.fixture.data(self.CategoryData) self.data.setup() - + self.hnd = self.generator.get_handler( "%s.Category" % (Category.__module__), obj=Category, connection=metadata.bind) self.hnd.begin() - + def tearDown(self): from sqlalchemy.orm import clear_mappers self.data.teardown() metadata.drop_all() clear_mappers() - + # class TestSQLAlchemyTableHandler(HandlerQueryTest): -# handler_path = "%s.categories" % (sqlalchemy_examples.__name__) +# handler_path = "%s.categories" % (sqlalchemy_examples.__name__) class TestSQLAlchemyGenerate(UsingFixtureTemplate, GenerateTest): args = [ - "fixture.examples.db.sqlalchemy_examples.Offer", + "fixture.examples.db.sqlalchemy_examples.Offer", "--dsn", str(conf.HEAVY_DSN), "--connect", "fixture.examples.db.sqlalchemy_examples:connect", "--setup", "fixture.examples.db.sqlalchemy_examples:setup_mappers"] - + def setUp(self): global realmeta, RealSession, memmeta, MemSession import sqlalchemy from sqlalchemy import MetaData, create_engine from sqlalchemy.orm import clear_mappers, scoped_session, sessionmaker, relation clear_mappers() - + realmeta = MetaData(bind=create_engine(conf.HEAVY_DSN)) - RealSession = scoped_session(sessionmaker(autoflush=False, transactional=False, bind=realmeta.bind)) - + RealSession = scoped_session(sessionmaker(autoflush=False, bind=realmeta.bind)) + memmeta = MetaData(bind=create_engine(conf.LITE_DSN)) - MemSession = scoped_session(sessionmaker(autoflush=True, transactional=False, bind=memmeta.bind)) - + MemSession = scoped_session(sessionmaker(autoflush=True, bind=memmeta.bind)) + self.setup_mappers() - + session = RealSession() - + # data source : categories.create(bind=realmeta.bind) products.create(bind=realmeta.bind) offers.create(bind=realmeta.bind) - + parkas = Category() parkas.name = "parkas" - session.save(parkas) + session.add(parkas) jersey = Product() jersey.name = "jersey" jersey.category = parkas - session.save(jersey) - + session.add(jersey) + rebates = Category() rebates.name = "rebates" rebates.id = 2 @@ -297,16 +295,16 @@ def setUp(self): super_cashback.name = "super cash back!" 
super_cashback.product = jersey super_cashback.category = rebates - session.save(super_cashback) - session.save(rebates) - + session.add(super_cashback) + session.add(rebates) + session.flush() - + # data target: categories.create(bind=memmeta.bind) products.create(bind=memmeta.bind) offers.create(bind=memmeta.bind) - + def tearDown(self): if realmeta: offers.drop(bind=realmeta.bind) @@ -316,47 +314,47 @@ def tearDown(self): offers.drop(bind=memmeta.bind) products.drop(bind=memmeta.bind) categories.drop(bind=memmeta.bind) - + def assert_data_loaded(self, fxt): session = MemSession() session.clear() - + rs = session.query(Category).order_by('name').all() eq_(len(rs), 2) parkas = rs[0] rebates = rs[1] eq_(parkas.name, "parkas") eq_(rebates.name, "rebates") - + rs = session.query(Product).all() eq_(len(rs), 1) eq_(rs[0].name, "jersey") - + rs = session.query(Offer).all() eq_(len(rs), 1) eq_(rs[0].name, "super cash back!") - - # note that here we test that colliding fixture key links + + # note that here we test that colliding fixture key links # got resolved correctly : eq_(session.query(Category).filter_by(id=fxt.products_1.category_id).one(), parkas) eq_(session.query(Category).filter_by(id=fxt.offers_1.category_id).one(), rebates) - + def assert_env_is_clean(self): # sanity check, ensure source has data : session = RealSession() session.clear() assert session.query(Product).count() - + # ensure target is empty : session = MemSession() session.clear() eq_(session.query(Product).count(), 0) - + ### FIXME, this shouldn't be so lame # clear mappers so that the dataset_generator() can setup mappers on its own: from sqlalchemy.orm import clear_mappers clear_mappers() - + def assert_env_generated_ok(self, e): # get rid of the source so that we # are sure we aren't ever querying the source db @@ -364,7 +362,7 @@ def assert_env_generated_ok(self, e): # offers.drop(bind=engine) # products.drop(bind=engine) # categories.drop(bind=engine) - + def create_fixture(self): return SQLAlchemyFixture( env = self.env, @@ -373,12 +371,12 @@ def create_fixture(self): # *load* data into the memory db : engine = memmeta.bind ) - + def load_env(self, env): - data = self.load_datasets(env, + data = self.load_datasets(env, [env['categoriesData'], env['productsData'], env['offersData']]) return data - + def setup_mappers(self): from sqlalchemy.orm import mapper, relation mapper(Category, categories) @@ -389,7 +387,7 @@ def setup_mappers(self): 'category': relation(Category), 'product': relation(Product) }) - + def visit_loader(self, loader): loader.engine = memmeta.bind - \ No newline at end of file + diff --git a/fixture/test/test_command/test_generate/test_generate_sqlobject.py b/tests/test_generate_sqlobject.py similarity index 86% rename from fixture/test/test_command/test_generate/test_generate_sqlobject.py rename to tests/test_generate_sqlobject.py index 0eb3bc4..6f5ae20 100644 --- a/fixture/test/test_command/test_generate/test_generate_sqlobject.py +++ b/tests/test_generate_sqlobject.py @@ -6,11 +6,11 @@ from fixture.command.generate import DataSetGenerator, dataset_generator from fixture.dataset import MergedSuperSet from fixture.style import NamedDataStyle -from fixture.test.test_command.test_generate import ( - compile_, GenerateTest, UsingTesttoolsTemplate, UsingFixtureTemplate) -from fixture.test import env_supports, conf +from .generate_helper import (compile_, GenerateTest, UsingTesttoolsTemplate, + UsingFixtureTemplate) +from . 
import env_supports, conf from fixture.examples.db.sqlobject_examples import ( - Category, Product, Offer, setup_db, teardown_db) + Category, Product, Offer, setup_db, teardown_db) sqlhub = None realconn = None @@ -20,9 +20,9 @@ def setup(): global memconn, realconn, sqlhub if not env_supports.sqlobject: raise SkipTest - + from sqlobject import connectionForURI, sqlhub - + realconn = connectionForURI(conf.HEAVY_DSN) memconn = connectionForURI("sqlite:/:memory:") @@ -34,9 +34,9 @@ def teardown(): class SQLObjectGenerateTest(GenerateTest): args = [ - "fixture.examples.db.sqlobject_examples.Offer", + "fixture.examples.db.sqlobject_examples.Offer", "--dsn", str(conf.HEAVY_DSN) ] - + def assert_data_loaded(self, fxt): rs = Category.select() eq_(rs.count(), 2) @@ -53,16 +53,16 @@ def assert_data_loaded(self, fxt): eq_(rs.count(), 1) eq_(rs[0].name, "super cash back!") - # note that here we test that colliding fixture key links + # note that here we test that colliding fixture key links # got resolved correctly : eq_(Category.get(fxt.product_1.category_id), parkas) eq_(Category.get(fxt.offer_1.category_id), rebates) - + def assert_env_is_clean(self): # sanity check : assert Product.select(connection=realconn).count() assert not Product.select(connection=memconn).count() - + def assert_env_generated_ok(self, e): CategoryData = e['CategoryData'] ProductData = e['ProductData'] @@ -72,49 +72,49 @@ def assert_env_generated_ok(self, e): Offer.clearTable(connection=realconn) Product.clearTable(connection=realconn) Category.clearTable(connection=realconn) - + def create_fixture(self): return SQLObjectFixture( env = self.env, style = NamedDataStyle(), dataclass = MergedSuperSet, ) - + def load_datasets(self, module, conn, datasets): raise NotImplementedError - + def load_env(self, env): # set our conn back to memory then load the fixture. # hmm, seems hoky sqlhub.processConnection = memconn - data = self.load_datasets(env, + data = self.load_datasets(env, [env['CategoryData'], env['ProductData'], env['OfferData']]) return data - - def setUp(self): + + def setUp(self): setup_db(realconn) sqlhub.processConnection = realconn - + parkas = Category(name="parkas") jersey = Product(name="jersey", category=parkas) rebates = Category(name="rebates") - super_cashback = Offer( name="super cash back!", + super_cashback = Offer( name="super cash back!", product=jersey, category=rebates) sqlhub.processConnection = None - + # now get the loading db as a sqlite mem connection : setup_db(memconn) - + def tearDown(self): sqlhub.processConnection = None teardown_db(realconn) teardown_db(memconn) - + class TestSQLObjectTesttools(UsingTesttoolsTemplate, SQLObjectGenerateTest): pass - + class TestSQLObjectFixture(UsingFixtureTemplate, SQLObjectGenerateTest): def visit_loader(self, loader): loader.connection = memconn - - \ No newline at end of file + + diff --git a/fixture/test/test_io.py b/tests/test_io.py similarity index 93% rename from fixture/test/test_io.py rename to tests/test_io.py index 11cd689..73ae0bd 100644 --- a/fixture/test/test_io.py +++ b/tests/test_io.py @@ -1,6 +1,9 @@ # -*- coding: latin_1 -*- +from __future__ import absolute_import + import os +import six from nose.tools import eq_ from os.path import join, exists, isdir, basename from os import path @@ -8,7 +11,7 @@ from nose.tools import eq_, raises from fixture import TempIO from fixture.io import mkdirall, putfile -from fixture.test import attr +from . import attr french = "tu pense qu'on peut m'utiliser comme ça?" 
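The hunks below lean on the six compatibility helpers adopted throughout this patch: six.b() builds a byte string on both Python 2 and 3, and six.StringIO stands in for the removed cStringIO. A small standalone sketch of the idiom, assuming only that six is installed:

    # -*- coding: utf-8 -*-
    import six

    payload = six.b("")   # b'' on Python 2 and Python 3 alike
    assert isinstance(payload, bytes)

    buf = six.StringIO()  # in-memory text stream replacing cStringIO.StringIO
    buf.write(u"tu pense qu'on peut m'utiliser comme ça?")
    assert buf.getvalue().endswith(u"?")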
@@ -19,14 +22,14 @@ def test_mkdirall(): try: mkdirall(join(tmp, 'blah/blah/')) assert exists(join(tmp, 'blah/blah')) - + # relative too ... os.chdir(tmp) mkdirall('ici/ou/la') assert exists('ici') assert exists('ici/ou') assert exists('ici/ou/la') - + finally: del tmp os.chdir(cwd) @@ -36,34 +39,34 @@ def test_putfile(): tmp = TempIO() cwd = os.getcwd() try: - + fname = join(tmp, 'french.txt') putfile(fname, french) - + assert exists(fname) - + f = open(fname, 'r') contents = f.read() f.close() assert contents == french - + # can make lazy dirs .. fname = join(tmp, 'ou/est/tu/frenchy.txt') putfile(fname, "") assert exists(fname) - + # relative : os.chdir(tmp) putfile('bahh', '') assert exists(join(tmp, 'bahh')) - + finally: del tmp os.chdir(cwd) @attr(unit=True) def test_del_self_destructs(): - """asserts that a module level reference self destructs + """asserts that a module level reference self destructs without exception.""" global _TMP _TMP = TempIO() @@ -71,11 +74,11 @@ def test_del_self_destructs(): class TestTempIO(object): def setUp(self): self.tmp = TempIO() - + def tearDown(self): if hasattr(self, 'tmp'): del self.tmp - + @attr(unit=True) def test_deferred(self): tmp = TempIO(deferred=True) @@ -83,7 +86,7 @@ def test_deferred(self): assert exists(root) del tmp assert exists(root) - + tmp2 = TempIO(deferred=False) root = str(tmp2) assert exists(root) @@ -118,25 +121,25 @@ def test_newdir(self): assert exists(self.tmp.rick_james) assert self.tmp.rick_james.startswith(self.tmp) assert self.tmp.rick_james.endswith("rick_james") - + self.tmp.rick_james = "rick james" assert exists(self.tmp.rick_james) assert self.tmp.rick_james.startswith(self.tmp) assert self.tmp.rick_james.endswith("rick james") - + self.tmp.rick_james = "rick_james/i/love/you" assert exists(self.tmp.rick_james) assert self.tmp.rick_james.startswith(self.tmp) assert self.tmp.rick_james.endswith("rick_james/i/love/you") - + @attr(unit=True) def test_path_interface(self): self.tmp.dupes = "processed/dupes" def endswith(p, end): assert p.endswith(end), "%s did not end in %s" % (p,end) - + eq_(self.tmp.dupes, path.join(self.tmp, "processed/dupes")) - eq_(self.tmp.dupes.abspath(), + eq_(self.tmp.dupes.abspath(), path.abspath(path.join(self.tmp, "processed/dupes"))) eq_(self.tmp.dupes.basename(), "dupes") eq_(self.tmp.dupes.dirname(), path.join(self.tmp, "processed")) @@ -145,15 +148,15 @@ def endswith(p, end): eq_(self.tmp.dupes.join("foo", "bar"), path.abspath(path.join( self.tmp, "processed/dupes/foo/bar"))) eq_(self.tmp.dupes.join("foo", "bar").exists(), False) - + self.tmp.dupes.more = "foo/bar" eq_(path.exists(path.join(self.tmp.dupes, "foo", "bar")), True) eq_(self.tmp.dupes.join("foo", "bar").exists(), True) - - eq_(self.tmp.dupes.realpath(), + + eq_(self.tmp.dupes.realpath(), path.realpath(path.join(self.tmp, "processed/dupes"))) eq_(self.tmp.dupes.splitpath(), path.split(self.tmp.dupes)) - eq_(self.tmp.dupes.splitext(), (path.realpath(path.join(self.tmp, + eq_(self.tmp.dupes.splitext(), (path.realpath(path.join(self.tmp, "processed/dupes")), "")) @attr(unit=True) @@ -170,17 +173,17 @@ def test_putfile(self): # check laziness of putfile's mkdir'ing : self.tmp.putfile('petite/grenouille/ribbit/frenchy.txt', french) - assert exists(join(self.tmp, + assert exists(join(self.tmp, 'petite/grenouille/ribbit/frenchy.txt')) # make sure that a second call will only create directories necessary: self.tmp.putfile('petite/grenouille/ribbit/foo.txt', "foo") - + @attr(unit=True) def test_putfile_mode(self): - 
self.tmp.putfile('frenchy.txt', "", 'wb') + self.tmp.putfile('frenchy.txt', six.b(""), 'wb') f = open(join(self.tmp, 'frenchy.txt'), 'rb') f.read() - + @attr(unit=True) @raises(TypeError) def test_putfile_accepts_only_relative_paths(self): @@ -195,4 +198,4 @@ def test_rmtree(self): @attr(unit=True) def test_root(self): assert isdir(self.tmp) - \ No newline at end of file + diff --git a/fixture/test/test_loadable/test_loadable.py b/tests/test_loadable.py similarity index 90% rename from fixture/test/test_loadable/test_loadable.py rename to tests/test_loadable.py index 23fbfbe..b759640 100644 --- a/fixture/test/test_loadable/test_loadable.py +++ b/tests/test_loadable.py @@ -1,17 +1,20 @@ import nose +import unittest + from nose.tools import raises, eq_ from nose.exc import SkipTest -import unittest -from fixture import DataSet, NamedDataStyle + +from fixture import TempIO, DataSet, NamedDataStyle from fixture.loadable import ( LoadableFixture, EnvLoadableFixture, DBLoadableFixture) -from fixture.test import attr, env_supports, PrudentTestResult -from fixture import TempIO + +from . import attr, env_supports, PrudentTestResult + def exec_if_supported(code, globals={}, locals={}): - # seems that for using from __future__ exec needs to think it's compiling a - # module + # seems that for using from __future__ exec needs to think it's compiling a + # module tmp = TempIO() try: try: @@ -25,52 +28,52 @@ class LoadableTest(object): """tests the behavior of fixture.loadable.LoadableFixture object. - + to test combinations of loaders and datasets, mix this into a TestCase. """ fixture = None - + def assert_data_loaded(self, dataset): """assert that the dataset was loaded.""" raise NotImplementedError - + def assert_data_torndown(self): """assert that the dataset was torn down.""" raise NotImplementedError - + def datasets(self): """returns some datasets.""" raise NotImplementedError - + def test_DataTestCase(self): from fixture import DataTestCase import unittest inspector = self class ns: tested = False - + class SomeDataTestCase(DataTestCase, unittest.TestCase): fixture = inspector.fixture datasets = inspector.datasets() def test_data_test(self): ns.tested = True inspector.assert_data_loaded(self.data) - + res = PrudentTestResult() loader = unittest.TestLoader() suite = loader.loadTestsFromTestCase(SomeDataTestCase) suite(res) - + eq_(res.failures, []) eq_(res.errors, []) eq_(res.testsRun, 1) eq_(ns.tested, True) - + inspector.assert_data_torndown() - + def test_with_data(self): import nose, unittest - + class ns: was_setup=False was_torndown=False @@ -78,24 +81,24 @@ def setup(): ns.was_setup=True def teardown(): ns.was_torndown=True - + kw = dict(setup=setup, teardown=teardown) @self.fixture.with_data(*self.datasets(), **kw) def test_data_test(data): eq_(ns.was_setup, True) self.assert_data_loaded(data) - + case = nose.case.FunctionTestCase(test_data_test) res = PrudentTestResult() case(res) - + eq_(res.failures, []) eq_(res.errors, []) eq_(res.testsRun, 1) - + eq_(ns.was_torndown, True) self.assert_data_torndown() - + def test_with_data_as_d(self): c = """ from __future__ import with_statement @@ -110,7 +113,7 @@ class HavingCategoryData: """mixin that adds data to a LoadableTest.""" def datasets(self): """returns a single category data set.""" - + class CategoryData(DataSet): def data(self): return ( @@ -128,46 +131,46 @@ class gray_stuff: class yellow_stuff: id = 2 name = 'yellow' - + return [CategoryData] - 
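The mixins in this file exercise both DataSet declaration styles that fixture supports: a data() method returning (key, column-dict) pairs, and plain inner classes whose attributes become columns. A condensed sketch of the two equivalent forms (the *ByMethod / *ByClass class names are illustrative only):

    from fixture import DataSet

    class CategoryDataByMethod(DataSet):
        def data(self):
            # rows as (key, column-dict) pairs
            return (('gray_stuff', dict(id=1, name='gray')),
                    ('yellow_stuff', dict(id=2, name='yellow')))

    class CategoryDataByClass(DataSet):
        # rows as inner classes; attribute names become column names
        class gray_stuff:
            id = 1
            name = 'gray'
        class yellow_stuff:
            id = 2
            name = 'yellow'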
+class HavingOfferProductData: """mixin that adds data to a LoadableTest.""" def datasets(self): - """returns some datasets.""" + """returns some datasets.""" class CategoryData(DataSet): def data(self): return ( ('cars', dict(id=1, name='cars')), ('free_stuff', dict(id=2, name='get free stuff')),) - + class ProductData(DataSet): class Meta: references = (CategoryData,) def data(self): return (('truck', dict( - id=1, - name='truck', + id=1, + name='truck', category_id=self.ref.cars.id)),) - + class OfferData(DataSet): class Meta: references = (CategoryData, ProductData) def data(self): return ( ('free_truck', dict( - id=1, + id=1, name='free truck', product_id=self.ref.truck.id, category_id=self.ref.free_stuff.id)), ) return [OfferData, ProductData] - -class HavingOfferProductAsDataType: + +class HavingOfferProductAsDataType: """mixin that adds data to a LoadableTest.""" def datasets(self): """returns some datasets.""" - + class CategoryData(DataSet): class cars: id = 1 @@ -175,64 +178,64 @@ class cars: class free_stuff: id = 2 name = 'get free stuff' - - ## FIXME: replace all instances of + + ## FIXME: replace all instances of ## foo_id with foo ... that is, we need refs to data sets class ProductData(DataSet): class truck: id = 1 name = 'truck' category_id = CategoryData.cars.ref('id') - + class OfferData(DataSet): class free_truck: id = 1 name = "it's a free truck" product_id = ProductData.truck.ref('id') category_id = CategoryData.free_stuff.ref('id') - + return [ProductData, OfferData] - -class HavingReferencedOfferProduct: + +class HavingReferencedOfferProduct: """mixin that adds data to a LoadableTest.""" def datasets(self): """returns some datasets.""" - + class CategoryData(DataSet): class cars: name = 'cars' class free_stuff: name = 'get free stuff' - + class ProductData(DataSet): class truck: name = 'truck' category = CategoryData.cars - + class OfferData(DataSet): class free_truck: name = "it's a free truck" product = ProductData.truck category = CategoryData.free_stuff - + return [ProductData, OfferData] - -class HavingRefInheritedOfferProduct: + +class HavingRefInheritedOfferProduct: """mixin that adds data to a LoadableTest.""" def datasets(self): """returns some datasets.""" - + class CategoryData(DataSet): class cars: name = 'cars' class free_stuff: name = 'get free stuff' - + class ProductData(DataSet): class truck: name = 'truck' category = CategoryData.cars - + class OfferData(DataSet): class free_truck: name = "it's a free truck" @@ -243,21 +246,21 @@ class free_spaceship(free_truck): name = "it's a free spaceship" class free_tv(free_spaceship): name = "it's a free TV" - + return [ProductData, OfferData] - + class LoaderPartialRecoveryTest(HavingOfferProductData): fixture = None - + def assert_partial_load_aborted(self): """assert that no datasets were loaded.""" raise NotImplementedError - + def partial_datasets(self): """returns some real datasets, then some dummy ones.""" d = [ds for ds in self.datasets()] - + class DummyData(DataSet): def data(self): return ( @@ -265,30 +268,30 @@ def data(self): ) d.append(DummyData) return d - + def test_with_data_iterruption(self): @raises(LoadableFixture.StorageMediaNotFound) @self.fixture.with_data(*self.partial_datasets()) def test_partial_datasets(fxt): pass - test_partial_datasets() + test_partial_datasets() self.assert_partial_load_aborted() class TestEnvLoadableFixture(object): @raises(ValueError) def test_storable_object_cannot_equal_dataset(self): - class SomeEnvLoadableFixture(EnvLoadableFixture): + class 
SomeEnvLoadableFixture(EnvLoadableFixture): def rollback(self): pass def commit(self): pass - + class MyDataSet(DataSet): class some_row: some_column = 'foo' - + efixture = SomeEnvLoadableFixture(env={'MyDataSet': MyDataSet}) data = efixture.data(MyDataSet) data.setup() - + class StubLoadableFixture(DBLoadableFixture): def create_transaction(self): class NoTrans: @@ -296,7 +299,7 @@ def commit(self): pass return NoTrans() class MockStorageMedium(DBLoadableFixture.StorageMediumAdapter): - def save(self, row, column_vals): + def save(self, row, column_vals): obj = self.medium() for k,v in column_vals: setattr(obj, k, v) @@ -320,22 +323,22 @@ class bob: class PetData(DataSet): class fido: owner_name = PersonData.bob.ref('name') - + ldr = StubLoadableFixture( style=NamedDataStyle(), medium=MockStorageMedium, env=locals()) ldr.begin() ldr.load_dataset(PetData()) - + eq_(calls[0], (Person, 'save')) eq_(calls[1], (Pet, 'save')) - + bob_db_obj = \ ldr.loaded[PersonData].meta._stored_objects.get_object('bob') eq_(bob_db_obj.name, PersonData.bob.name) fido_db_obj = \ ldr.loaded[PetData].meta._stored_objects.get_object('fido') eq_(fido_db_obj.owner_name, PersonData.bob.name) - + @attr(unit=True) def test_row_refs_are_resolved(self): calls = [] @@ -352,22 +355,22 @@ class bob: class PetData(DataSet): class fido: owner = PersonData.bob - + ldr = StubLoadableFixture( style=NamedDataStyle(), medium=MockStorageMedium, env=locals()) ldr.begin() ldr.load_dataset(PetData()) - + eq_(calls[0], (Person, 'save')) eq_(calls[1], (Pet, 'save')) - + bob_db_obj = \ ldr.loaded[PersonData].meta._stored_objects.get_object('bob') eq_(bob_db_obj.name, PersonData.bob.name) fido_db_obj = \ ldr.loaded[PetData].meta._stored_objects.get_object('fido') eq_(fido_db_obj.owner, bob_db_obj) - + @attr(unit=True) def test_lists_of_row_refs_are_resolved(self): calls = [] @@ -386,28 +389,28 @@ class stacy: class PetData(DataSet): class fido: owners = [PersonData.bob, PersonData.stacy] - + ldr = StubLoadableFixture( style=NamedDataStyle(), medium=MockStorageMedium, env=locals()) ldr.begin() ldr.load_dataset(PetData()) - + eq_(calls[0], (Person, 'save')) eq_(calls[1], (Person, 'save')) eq_(calls[2], (Pet, 'save')) - + bob_db_obj = \ ldr.loaded[PersonData].meta._stored_objects.get_object('bob') stacy_db_obj = \ ldr.loaded[PersonData].meta._stored_objects.get_object('stacy') fido_db_obj = \ ldr.loaded[PetData].meta._stored_objects.get_object('fido') - eq_(fido_db_obj.owners, [bob_db_obj, stacy_db_obj]) - + eq_(list(fido_db_obj.owners), [bob_db_obj, stacy_db_obj]) + @attr(unit=True) def test_DataSet_cannot_ref_self(self): class MockDataObject(object): - def save(self): + def save(self): pass class Person(MockDataObject): name = None @@ -420,15 +423,15 @@ class bob: class jenny: name = "Jenny Ginetti" jenny.friend = bob - + ldr = StubLoadableFixture( style=NamedDataStyle(), medium=MockStorageMedium, env=locals()) ldr.begin() # was raising load error because the object was getting stored : ldr.load_dataset(PersonData()) - + bob_db_obj = \ ldr.loaded[PersonData].meta._stored_objects.get_object('bob') jenny_db_obj = \ ldr.loaded[PersonData].meta._stored_objects.get_object('jenny') - eq_(jenny_db_obj.friend, bob_db_obj) \ No newline at end of file + eq_(jenny_db_obj.friend, bob_db_obj) diff --git a/fixture/test/test_loadable/test_django/__init__.py b/tests/test_loadable/test_django/__init__.py similarity index 100% rename from fixture/test/test_loadable/test_django/__init__.py rename to tests/test_loadable/test_django/__init__.py diff --git 
a/fixture/test/test_loadable/test_django/fixture-with-django-doctests.rst b/tests/test_loadable/test_django/fixture-with-django-doctests.rst similarity index 100% rename from fixture/test/test_loadable/test_django/fixture-with-django-doctests.rst rename to tests/test_loadable/test_django/fixture-with-django-doctests.rst diff --git a/fixture/test/test_loadable/test_django/fixtures.py b/tests/test_loadable/test_django/fixtures.py similarity index 100% rename from fixture/test/test_loadable/test_django/fixtures.py rename to tests/test_loadable/test_django/fixtures.py diff --git a/fixture/test/test_loadable/test_django/test_djangoenv.py b/tests/test_loadable/test_django/test_djangoenv.py similarity index 100% rename from fixture/test/test_loadable/test_django/test_djangoenv.py rename to tests/test_loadable/test_django/test_djangoenv.py diff --git a/fixture/test/test_loadable/test_django/test_djangomeduim.py b/tests/test_loadable/test_django/test_djangomeduim.py similarity index 97% rename from fixture/test/test_loadable/test_django/test_djangomeduim.py rename to tests/test_loadable/test_django/test_djangomeduim.py index f327aae..afa5680 100644 --- a/fixture/test/test_loadable/test_django/test_djangomeduim.py +++ b/tests/test_loadable/test_django/test_djangomeduim.py @@ -2,7 +2,7 @@ from fixture import DjangoFixture from fixture.style import NamedDataStyle from fixture.loadable.django_loadable import field_is_required -from fixtures import * +from .fixtures import * -from util import * +from .util import * from nose.tools import raises from fixture.examples.django_example.app import models @@ -10,16 +10,16 @@ def _check_row(medium, column_vals): medium._check_schema(column_vals) - + def test_schema_conformance(): - + valid_rels = ValidNoRelationsData() invalid_rels = InvalidNoRelationsData() class NoRelations(django_models.Model): char = django_models.CharField(max_length=10) num = django_models.IntegerField() - + for dataset, model, callable in \ [ (valid_rels, NoRelations, _check_row), @@ -45,7 +45,7 @@ class TestMod(models.Model): nullable_date = models.DateTimeField(null=True, auto_now_add=True) default_date = models.DateTimeField(default=datetime.now) - + required_matrix = dict( pk=False, req=True, @@ -56,7 +56,7 @@ nullable_date=False, default_date=False, ) - + def check_field_required(fld, result): msg = "field '%s': null=%s, primary_key=%s, auto_now=%s, auto_now_add=%s " \ "should be %s" @@ -66,8 +66,8 @@ def check_field_required(fld, result): fld.primary_key, auto_now, auto_now_add, result) - + for item in required_matrix.items(): fld, result = item check_field_required.description = "%s required? 
%s" % item - yield check_field_required, TestMod._meta.get_field(fld), result \ No newline at end of file + yield check_field_required, TestMod._meta.get_field(fld), result diff --git a/fixture/test/test_loadable/test_django/test_loading.py b/tests/test_loadable/test_django/test_loading.py similarity index 100% rename from fixture/test/test_loadable/test_django/test_loading.py rename to tests/test_loadable/test_django/test_loading.py diff --git a/fixture/test/test_loadable/test_django/test_wrong_declarations.py b/tests/test_loadable/test_django/test_wrong_declarations.py similarity index 92% rename from fixture/test/test_loadable/test_django/test_wrong_declarations.py rename to tests/test_loadable/test_django/test_wrong_declarations.py index 9c28680..5db1ead 100644 --- a/fixture/test/test_loadable/test_django/test_wrong_declarations.py +++ b/tests/test_loadable/test_django/test_wrong_declarations.py @@ -1,8 +1,10 @@ +from __future__ import absolute_import + from fixture import DjangoFixture, DataSet, style from fixture.exc import LoadError from nose.tools import raises, assert_raises -from util import * +from .util import * from fixture.examples.django_example.app import models class ReviewerData(DataSet): @@ -22,7 +24,7 @@ class frank_herbert: dj_fixture = DjangoFixture(env=models, style=style.NamedDataStyle()) - + def test_wrong_relation_declaration(): assert 'reviewers' in models.Book._meta.get_all_field_names() data = dj_fixture.data(BookData) @@ -37,5 +39,5 @@ class ben: assert_empty(models) data = dj_fixture.data(ReviewerData) assert_raises(LoadError, data.setup) - data.teardown() + data.teardown() diff --git a/fixture/test/test_loadable/test_django/util.py b/tests/test_loadable/test_django/util.py similarity index 100% rename from fixture/test/test_loadable/test_django/util.py rename to tests/test_loadable/test_django/util.py diff --git a/fixture/test/test_loadable/test_google_datastore_loadable.py b/tests/test_loadable_google_datastore.py similarity index 87% rename from fixture/test/test_loadable/test_google_datastore_loadable.py rename to tests/test_loadable_google_datastore.py index 24571fc..19c873b 100644 --- a/fixture/test/test_loadable/test_google_datastore_loadable.py +++ b/tests/test_loadable_google_datastore.py @@ -1,12 +1,15 @@ -import sys import os +import sys +import unittest + from nose.exc import SkipTest from nose.tools import eq_ -import unittest + from fixture import DataSet, TempIO, GoogleDatastoreFixture from fixture.util import reset_log_level -from fixture.test import conf, attr + +from . 
import conf, attr tmp = TempIO() @@ -24,15 +27,15 @@ def setup(): import yaml import django import antlr3 - + from google.appengine.tools import dev_appserver - + appid = "" - dev_appserver.SetupStubs(appid, + dev_appserver.SetupStubs(appid, clear_datastore = False, # just removes the files when True - datastore_path = tmp.join("datastore.data"), - blobstore_path = tmp.join("blobstore.data"), - history_path = tmp.join("history.data"), + datastore_path = tmp.join("datastore.data"), + blobstore_path = tmp.join("blobstore.data"), + history_path = tmp.join("history.data"), login_url = None) else: raise SkipTest @@ -47,149 +50,148 @@ def clear_datastore(): os.unlink(tmp.join(basename)) class TestSetupTeardown(unittest.TestCase): - + class CategoryData(DataSet): class cars: name = 'cars' class free_stuff: name = 'get free stuff' - + def setUp(self): from google.appengine.ext import db - + class Category(db.Model): - name = db.StringProperty() + name = db.StringProperty() self.Category = Category - + self.fixture = GoogleDatastoreFixture(env={'CategoryData': self.Category}) - + def tearDown(self): clear_datastore() - + @attr(functional=1) def test_setup_then_teardown(self): - + eq_(list(self.Category.all()), []) - + data = self.fixture.data(self.CategoryData) data.setup() - + cats = self.Category.all().order('name') - + eq_(cats[0].name, 'cars') eq_(cats[1].name, 'get free stuff') - + data.teardown() - + eq_(list(self.Category.all()), []) class TestRelationships(unittest.TestCase): - + def setUp(self): from google.appengine.ext import db - + class CategoryData(DataSet): class red: color = 'red' - + class ProductData(DataSet): class red_truck: category = CategoryData.red sale_tag = "Big, Shiny Red Truck" self.ProductData = ProductData - + class Category(db.Model): color = db.StringProperty() self.Category = Category - + class Product(db.Model): category = db.ReferenceProperty(Category) sale_tag = db.StringProperty() self.Product = Product - + self.fixture = GoogleDatastoreFixture(env={ 'CategoryData': self.Category, 'ProductData': self.Product, }) - + def tearDown(self): clear_datastore() - + @attr(functional=1) def test_setup_then_teardown(self): - + eq_(list(self.Category.all()), []) eq_(list(self.Product.all()), []) - + data = self.fixture.data(self.ProductData) data.setup() - + products = self.Product.all() - + eq_(products[0].sale_tag, "Big, Shiny Red Truck") eq_(products[0].category.color, "red") - + data.teardown() - + eq_(list(self.Category.all()), []) eq_(list(self.Product.all()), []) class TestListOfRelationships(unittest.TestCase): - + def setUp(self): from google.appengine.ext import db - + class Author(db.Model): name = db.StringProperty() self.Author = Author - + class Book(db.Model): title = db.StringProperty() authors = db.ListProperty(db.Key) self.Book = Book - + class AuthorData(DataSet): class frank_herbert: name = "Frank Herbert" class brian_herbert: name = "Brian Herbert" - + class BookData(DataSet): class two_worlds: title = "Man of Two Worlds" authors = [AuthorData.frank_herbert, AuthorData.brian_herbert] self.BookData = BookData - + self.fixture = GoogleDatastoreFixture(env={ 'BookData': self.Book, 'AuthorData': self.Author }) - + def tearDown(self): clear_datastore() - + @attr(functional=1) def test_setup_then_teardown(self): - + eq_(list(self.Author.all()), []) eq_(list(self.Book.all()), []) - + data = self.fixture.data(self.BookData) data.setup() - + books = self.Book.all() - + eq_(books[0].title, "Man of Two Worlds") authors = [self.Author.get(k) for k in 
books[0].authors] - print authors eq_(len(authors), 2) authors.sort(key=lambda a:a.name ) eq_(authors[0].name, "Brian Herbert") eq_(authors[1].name, "Frank Herbert") - + data.teardown() - + eq_(list(self.Author.all()), []) eq_(list(self.Book.all()), []) - + diff --git a/fixture/test/test_loadable/test_sqlobject_loadable.py b/tests/test_loadable_object.py similarity index 86% rename from fixture/test/test_loadable/test_sqlobject_loadable.py rename to tests/test_loadable_object.py index 8885bf2..3e94f79 100644 --- a/fixture/test/test_loadable/test_sqlobject_loadable.py +++ b/tests/test_loadable_object.py @@ -1,16 +1,19 @@ +from __future__ import absolute_import import os, sys + from nose.tools import eq_ from nose.exc import SkipTest -from fixture import SQLObjectFixture -from fixture.test import env_supports + from fixture import ( - SQLObjectFixture, NamedDataStyle, PaddedNameStyle, CamelAndUndersStyle, + SQLObjectFixture, NamedDataStyle, PaddedNameStyle, CamelAndUndersStyle, DataSet) from fixture.dataset import MergedSuperSet -from fixture.test.test_loadable import * from fixture.examples.db.sqlobject_examples import * -from fixture.test import conf + +from . import conf, env_supports +from .test_loadable import * + def setup(): if not env_supports.sqlobject: raise SkipTest @@ -18,21 +21,21 @@ def setup(): class SQLObjectFixtureTest: fixture = SQLObjectFixture( style=( NamedDataStyle() + CamelAndUndersStyle()), - dsn=conf.LITE_DSN, env=globals(), + dsn=conf.LITE_DSN, env=globals(), use_transaction=False, dataclass=MergedSuperSet ) - + def setUp(self, dsn=conf.LITE_DSN): """should load the dataset""" from sqlobject import connectionForURI self.conn = connectionForURI(dsn) self.fixture.connection = self.conn - + from sqlobject import sqlhub sqlhub.processConnection = self.conn - + setup_db(self.conn) - + def tearDown(self): """should unload the dataset.""" conn = self.conn @@ -43,18 +46,18 @@ def tearDown(self): class SQLObjectCategoryTest(SQLObjectFixtureTest): def assert_data_loaded(self, dataset): """assert that the dataset was loaded.""" - eq_(Category.get( dataset.gray_stuff.id).name, + eq_(Category.get( dataset.gray_stuff.id).name, dataset.gray_stuff.name) - eq_(Category.get( dataset.yellow_stuff.id).name, + eq_(Category.get( dataset.yellow_stuff.id).name, dataset.yellow_stuff.name) - + def assert_data_torndown(self): """assert that the dataset was torn down.""" eq_(Category.select().count(), 0) - + class TestSQLObjectCategory( HavingCategoryData, SQLObjectCategoryTest, LoadableTest): - pass + pass class HavingCategoryDataStorable: """mixin that adds data to a LoadableTest.""" @@ -69,7 +72,7 @@ class yellow_stuff: id=2 name='yellow' return [WhateverIWantToCallIt] - + class TestSQLObjectCategoryStorable( HavingCategoryDataStorable, SQLObjectCategoryTest, LoadableTest): pass @@ -78,29 +81,29 @@ class TestSQLObjectCategoryAsDataType( pass class TestSQLObjectPartialLoad( - SQLObjectFixtureTest, LoaderPartialRecoveryTest): + SQLObjectFixtureTest, LoaderPartialRecoveryTest): def assert_partial_load_aborted(self): raise SkipTest("I don't think sqlobject can support this feature") - + # t = self.conn.transaction() # eq_(Category.select(connection=t).count(), 0) - + class SQLObjectFixtureCascadeTest(SQLObjectFixtureTest): def assert_data_loaded(self, dataset): """assert that the dataset was loaded.""" eq_(Offer.get(dataset.free_truck.id).name, dataset.free_truck.name) - + eq_(Product.get( dataset.truck.id).name, dataset.truck.name) - + eq_(Category.get( dataset.cars.id).name, 
             dataset.cars.name)
         eq_(Category.get( dataset.free_stuff.id).name,
             dataset.free_stuff.name)
-    
+
     def assert_data_torndown(self):
         """assert that the dataset was torn down."""
         eq_(Category.select().count(), 0)
@@ -111,31 +114,31 @@ class SQLObjectFixtureCascadeTestWithHeavyDB(SQLObjectFixtureCascadeTest):
     def setUp(self):
         if not conf.HEAVY_DSN:
             raise SkipTest
-        
+
         SQLObjectFixtureCascadeTest.setUp(self, dsn=conf.HEAVY_DSN)
 
 class TestSQLObjectFixtureCascade(
-        HavingOfferProductData, SQLObjectFixtureCascadeTest, 
+        HavingOfferProductData, SQLObjectFixtureCascadeTest,
         LoadableTest):
     pass
 class TestSQLObjectFixtureCascadeWithHeavyDB(
-        HavingOfferProductData, SQLObjectFixtureCascadeTestWithHeavyDB, 
+        HavingOfferProductData, SQLObjectFixtureCascadeTestWithHeavyDB,
         LoadableTest):
     pass
 class TestSQLObjectFixtureCascadeAsType(
-        HavingOfferProductAsDataType, SQLObjectFixtureCascadeTest, 
+        HavingOfferProductAsDataType, SQLObjectFixtureCascadeTest,
         LoadableTest):
     pass
 class TestSQLObjectFixtureCascadeAsRef(
-        HavingReferencedOfferProduct, SQLObjectFixtureCascadeTest, 
+        HavingReferencedOfferProduct, SQLObjectFixtureCascadeTest,
         LoadableTest):
     pass
 class TestSQLObjectFixtureCascadeAsRefInherit(
-        HavingRefInheritedOfferProduct, SQLObjectFixtureCascadeTest, 
+        HavingRefInheritedOfferProduct, SQLObjectFixtureCascadeTest,
         LoadableTest):
     pass
 class TestSQLObjectFixtureCascadeAsRefInheritWithHeavyDB(
-        HavingRefInheritedOfferProduct, SQLObjectFixtureCascadeTestWithHeavyDB, 
+        HavingRefInheritedOfferProduct, SQLObjectFixtureCascadeTestWithHeavyDB,
         LoadableTest):
     pass
-    
\ No newline at end of file
+
diff --git a/fixture/test/test_loadable/test_loadable_queue.py b/tests/test_loadable_queue.py
similarity index 100%
rename from fixture/test/test_loadable/test_loadable_queue.py
rename to tests/test_loadable_queue.py
diff --git a/fixture/test/test_loadable/test_sqlalchemy_loadable.py b/tests/test_loadable_sqlalchemy.py
similarity index 93%
rename from fixture/test/test_loadable/test_sqlalchemy_loadable.py
rename to tests/test_loadable_sqlalchemy.py
index ba46305..5bbd693 100644
--- a/fixture/test/test_loadable/test_sqlalchemy_loadable.py
+++ b/tests/test_loadable_sqlalchemy.py
@@ -1,17 +1,21 @@
+from __future__ import print_function
 import unittest
+
 from nose.tools import eq_, raises
 from nose.exc import SkipTest
-from fixture import SQLAlchemyFixture
-from fixture.dataset import MergedSuperSet
+
 from fixture import (
     SQLAlchemyFixture, NamedDataStyle, CamelAndUndersStyle, TrimmedNameStyle)
-from fixture.exc import UninitializedError
-from fixture.test import conf, env_supports, attr
-from fixture.test.test_loadable import *
+from fixture.dataset import MergedSuperSet
 from fixture.examples.db.sqlalchemy_examples import *
+from fixture.exc import UninitializedError
 from fixture.loadable.sqlalchemy_loadable import *
 
+from . import conf, env_supports, attr
+from .test_loadable import *
+
+
 
 def get_transactional_session():
     if sa_major < 0.5:
         session = scoped_session(
@@ -57,10 +61,10 @@ def test_cannot_teardown_unloaded_fixture():
     class CategoryData(DataSet):
         class cars:
             name = 'cars'
-    
+
     engine = create_engine(conf.LITE_DSN)
     metadata.bind = engine
-    
+
     db = SQLAlchemyFixture(
         env=globals(),
         engine=metadata.bind
@@ -73,39 +77,39 @@ def test_negotiated_medium():
     class CategoryData(DataSet):
         class cars:
             name = 'cars'
-    
+
     engine = create_engine(conf.LITE_DSN)
     metadata.bind = engine
     metadata.create_all()
-    
+
     eq_(type(negotiated_medium(categories, CategoryData)), TableMedium)
     eq_(is_table(categories), True)
-    
+
     clear_mappers()
     mapper(Category, categories)
-    
+
     eq_(type(negotiated_medium(Category, CategoryData)), MappedClassMedium)
     eq_(is_mapped_class(Category), True)
     # hmmm
     # eq_(is_assigned_mapper(Category), False)
-    
+
     clear_mappers()
     ScopedSession = scoped_session(get_transactional_session())
-    ScopedSession.mapper(Category, categories)
-    
+    mapper(Category, categories)
+
     eq_(type(negotiated_medium(Category, CategoryData)), MappedClassMedium)
     eq_(is_mapped_class(Category), True)
     eq_(is_assigned_mapper(Category), True)
-    
+
 @attr(unit=1)
-def test_negotiated_medium_05():    
+def test_negotiated_medium_05():
     if sa_major < 0.5:
         raise SkipTest("Requires SQLAlchemy >= 0.5")
-    
+
     class FooData(DataSet):
         class foo:
             name = 'foozilator'
-    
+
     from sqlalchemy.ext.declarative import declarative_base
     Base = declarative_base()
     engine = create_engine(conf.LITE_DSN)
@@ -114,7 +118,7 @@ class DeclarativeFoo(Base):
         __tablename__ = 'fixture_declarative_foo'
         id = Column(Integer, primary_key=True)
         name = Column(String)
-    
+
     DeclarativeFoo.metadata.bind = engine
     DeclarativeFoo.__table__.create()
     try:
@@ -128,7 +132,7 @@ class cars:
             name = 'cars'
         class free_stuff:
             name = 'get free stuff'
-    
+
     def setUp(self):
         engine = create_engine(conf.LITE_DSN)
         metadata.bind = engine
@@ -139,25 +143,25 @@ def setUp(self):
             env={'CategoryData':Category},
             engine=metadata.bind
         )
-        
+
         clear_mappers()
         mapper(Category, categories)
-        
+
     def tearDown(self):
         metadata.drop_all()
         self.session.close()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         eq_(self.session.query(Category).all(), [])
-        
+
         data = self.fixture.data(self.CategoryData)
         data.setup()
         clear_session(self.session)
         cats = self.session.query(Category).order_by('name').all()
         eq_(cats[0].name, 'cars')
         eq_(cats[1].name, 'get free stuff')
-        
+
         data.teardown()
         clear_session(self.session)
         eq_(list(self.session.query(Category)), [])
@@ -169,46 +173,46 @@ class cars:
         name = 'cars'
     class free_stuff:
         name = 'get free stuff'
-    
+
     def setUp(self):
         engine = create_engine(conf.LITE_DSN)
         metadata.bind = engine
         # metadata.bind.echo = True
         metadata.create_all()
-        
+
         Session = get_transactional_session()
         self.session = Session()
-        
+
         # note the lack of explicit binding :
         self.fixture = SQLAlchemyFixture(
             env={'CategoryData':Category},
         )
-        
+
         clear_mappers()
         # since categories is assigned to metadata, SA should handle binding for us
         mapper(Category, categories)
-        
+
     def tearDown(self):
         # metadata.bind.echo = False
         metadata.drop_all()
         self.session.close()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         eq_(self.session.query(Category).all(), [])
-        
+
         data = self.fixture.data(self.CategoryData)
         data.setup()
-        
+
         clear_session(self.session)
         cats = self.session.query(Category).order_by('name').all()
         eq_(cats[0].name, 'cars')
         eq_(cats[1].name, 'get free stuff')
-        
+
         data.teardown()
         clear_session(self.session)
         eq_(list(self.session.query(Category)), [])
-    
+
 class CategoryData(DataSet):
     class cars:
         name = 'cars'
@@ -230,12 +234,12 @@ class free_spaceship(free_truck):
         name = "it's a free spaceship"
     class free_tv(free_spaceship):
         name = "it's a free TV"
-    
+
 class TestCascadingReferences(unittest.TestCase):
     CategoryData = CategoryData
     ProductData = ProductData
     OfferData = OfferData
-    
+
     def setUp(self):
         if not conf.HEAVY_DSN:
             raise SkipTest("conf.HEAVY_DSN not defined")
@@ -244,15 +248,15 @@ def setUp(self):
         metadata.create_all()
         Session = get_transactional_session()
         self.session = Session()
-        
+
         self.fixture = SQLAlchemyFixture(
             env=globals(),
             engine=metadata.bind,
             style=NamedDataStyle(),
         )
-        
+
         clear_mappers()
-        
+
         mapper(Category, categories)
         mapper(Product, products, properties={
             'category': relation(Category, backref='products')
@@ -261,48 +265,48 @@ def setUp(self):
             'product': relation(Product, backref='offers'),
             'category': relation(Category, backref='offers')
         })
-        
+
     def tearDown(self):
         metadata.drop_all()
         self.session.close()
         clear_mappers()
         # self.conn.close()
         metadata.bind.dispose()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         eq_(self.session.query(Category).all(), [])
         eq_(self.session.query(Product).all(), [])
         eq_(self.session.query(Offer).all(), [])
-        
+
         data = self.fixture.data(self.OfferData)
         data.setup()
         clear_session(self.session)
-        
+
         cats = self.session.query(Category).order_by('name').all()
         eq_(cats[0].name, 'cars')
         eq_(cats[1].name, 'get free stuff')
-        
+
         prods = self.session.query(Product).order_by('name').all()
         eq_(prods[0].name, 'truck')
         eq_(prods[0].category, cats[0])
-        
+
         off = self.session.query(Offer).order_by('name').all()
         eq_(off[0].name, "it's a free TV")
         eq_(off[0].product, prods[0])
         eq_(off[0].category, cats[1])
-        
+
         eq_(off[1].name, "it's a free spaceship")
         eq_(off[1].product, prods[0])
         eq_(off[1].category, cats[1])
-        
+
         eq_(off[2].name, "it's a free truck")
         eq_(off[2].product, prods[0])
         eq_(off[2].category, cats[1])
-        
+
         data.teardown()
         clear_session(self.session)
-        
+
         eq_(self.session.query(Category).all(), [])
         eq_(self.session.query(Product).all(), [])
         eq_(self.session.query(Offer).all(), [])
@@ -313,7 +317,7 @@ class cars:
         name = 'cars'
     class free_stuff:
         name = 'get free stuff'
-    
+
     def setUp(self):
         self.engine = create_engine(conf.LITE_DSN)
         # self.conn = self.engine.connect()
@@ -327,37 +331,37 @@ def setUp(self):
             env={'CategoryData':Category},
             engine=metadata.bind
         )
-        
+
         clear_mappers()
         mapper(Category, categories)
-        
+
     def tearDown(self):
         metadata.drop_all()
         self.session.close()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         eq_(self.session.query(Category).all(), [])
-        
+
         data = self.fixture.data(self.CategoryData)
         data.setup()
         clear_session(self.session)
-        
+
         cats = self.session.query(Category).order_by('name').all()
         eq_(cats[0].name, 'cars')
         eq_(cats[1].name, 'get free stuff')
-        
+
         # simulate the application running into some kind of error:
         new_cat = Category()
         new_cat.name = "doomed to non-existance"
         save_session(self.session, new_cat)
         self.session.rollback()
         self.ScopedSession.remove()
-        
+
         data.teardown()
         clear_session(self.session)
-        
-        print [(c.id, c.name) for c in self.session.query(Category).all()]
+
+        print([(c.id, c.name) for c in self.session.query(Category).all()])
         eq_(list(self.session.query(Category)), [])
 
 class TestScopedSessions(unittest.TestCase):
@@ -366,7 +370,7 @@ class cars:
         name = 'cars'
     class free_stuff:
         name = 'get free stuff'
-    
+
     def setUp(self):
         self.engine = create_engine(conf.LITE_DSN)
         metadata.bind = self.engine
@@ -377,25 +381,25 @@ def setUp(self):
             env={'CategoryData':Category},
             engine=metadata.bind
         )
-        
+
         clear_mappers()
         mapper(Category, categories)
-        
+
     def tearDown(self):
         metadata.drop_all()
         self.session.close()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         eq_(self.session.query(Category).all(), [])
-        
+
         data = self.fixture.data(self.CategoryData)
         data.setup()
         clear_session(self.session)
         cats = self.session.query(Category).order_by('name').all()
         eq_(cats[0].name, 'cars')
         eq_(cats[1].name, 'get free stuff')
-        
+
         data.teardown()
         clear_session(self.session)
         eq_(list(self.session.query(Category)), [])
@@ -406,48 +410,48 @@ class cars:
         name = 'cars'
     class free_stuff:
         name = 'get free stuff'
-    
+
     def setUp(self):
         if not env_supports.elixir:
             raise SkipTest("elixir module not found")
         import elixir
-        
+
         self.engine = create_engine(conf.LITE_DSN)
         metadata.bind = self.engine
         metadata.create_all()
-        
+
         class CategoryEntity(elixir.Entity):
             elixir.using_options(tablename=str(categories))
             # save_on_init IS VERY IMPORTANT
             elixir.using_mapper_options(save_on_init=False)
-        
+
         self.CategoryEntity = CategoryEntity
-        
+
         self.fixture = SQLAlchemyFixture(
             env={'CategoryData':CategoryEntity},
             engine=metadata.bind
         )
-        
+
         elixir.metadata.bind = self.engine
         elixir.setup_all()
-    
+
     def tearDown(self):
         metadata.drop_all()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         try:
             from elixir import session as elixir_session
         except ImportError:
             from elixir import objectstore as elixir_session
-        
+
         eq_(len(elixir_session.query(self.CategoryEntity).all()), 0)
-        
+
         data = self.fixture.data(self.CategoryData)
         data.setup()
-        
+
         eq_(len(elixir_session.query(self.CategoryEntity).all()), 2)
-        
+
         data.teardown()
         eq_(elixir_session.query(self.CategoryEntity).all(), [])
@@ -457,7 +461,7 @@ class cars:
         name = 'cars'
     class free_stuff:
         name = 'get free stuff'
-    
+
     def setUp(self):
         self.engine = create_engine(conf.LITE_DSN)
         metadata.bind = self.engine
@@ -469,26 +473,26 @@ def setUp(self):
             env={'CategoryData':categories},
             engine=metadata.bind
         )
-        
+
         clear_mappers()
         mapper(Category, categories)
-        
+
     def tearDown(self):
         metadata.drop_all()
         self.session.close()
-    
+
     @attr(functional=1)
     def test_setup_then_teardown(self):
         eq_(self.session.query(Category).all(), [])
-        
+
         data = self.fixture.data(self.CategoryData)
         data.setup()
         clear_session(self.session)
-        
+
         cats = self.session.execute(categories.select()).fetchall()
         eq_(cats[0].name, 'cars')
         eq_(cats[1].name, 'get free stuff')
-        
+
         data.teardown()
         clear_session(self.session)
         eq_(self.session.execute(categories.select()).fetchall(), [])
@@ -499,33 +503,33 @@ class cars:
         name = 'cars'
     class free_stuff:
         name = 'get free stuff'
-    
+
     def setUp(self):
         if not conf.HEAVY_DSN:
             raise SkipTest("conf.HEAVY_DSN not defined")
-        
+
         self.litemeta = MetaData(bind=conf.LITE_DSN)
         LiteSession = sessionmaker(bind=self.litemeta.bind)
         self.litesession = LiteSession()
-        
+
         heavymeta = MetaData(bind=create_engine(conf.HEAVY_DSN))
         HeavySession = sessionmaker(bind=heavymeta.bind)
         self.heavysession = HeavySession()
-        
+
         # this creates the default bind:
         metadata.bind = heavymeta.bind
         metadata.create_all()
-        
-        # this creates the table in mem but does not bind 
+
+        # this creates the table in mem but does not bind
         # the connection to the table:
         categories.create(bind=self.litemeta.bind)
-        
+
         clear_mappers()
         mapper(Category, categories)
-    
+
     def tearDown(self):
         metadata.drop_all()
-    
+
     def test_with_engine_connection(self):
         fixture = SQLAlchemyFixture(
             # maps to a table object :
@@ -535,15 +539,15 @@ def test_with_engine_connection(self):
         )
         data = fixture.data(CategoryData)
         data.setup()
-        
+
         rs = self.heavysession.query(Category).all()
-        assert rs==[], "unexpected records in HEAVY_DSN db: %s" % rs 
-        
+        assert rs==[], "unexpected records in HEAVY_DSN db: %s" % rs
+
         rs = self.litesession.query(Category).all()
         eq_(len(rs), 2)
-        
+
         data.teardown()
-        
+
         rs = self.litesession.query(Category).all()
         eq_(len(rs), 0)
@@ -562,22 +566,22 @@ def test_fixture_can_be_disposed():
         env={'CategoryData':Category},
         engine=metadata.bind
     )
-    
+
     class CategoryData(DataSet):
         class cars:
             name = 'cars'
         class free_stuff:
             name = 'get free stuff'
-    
+
     clear_mappers()
     mapper(Category, categories)
-    
+
     data = fixture.data(CategoryData)
     data.setup()
     data.teardown()
-    
+
     fixture.dispose()
-    
+
     # cannot use fixture anymore :
     try:
         data.setup()
@@ -585,7 +589,7 @@ class free_stuff:
         pass
     else:
         assert False, "data.setup() did not raise InvalidRequestError after connection was disposed"
-    
+
     # a new instance of everything is needed :
     metadata.create_all()
     fixture = SQLAlchemyFixture(
@@ -595,7 +599,7 @@ class free_stuff:
     data = fixture.data(CategoryData)
     data.setup()
     data.teardown()
-    
+
 @attr(unit=True)
 def test_SQLAlchemyFixture_configured_with_bound_session_and_conn():
     class StubConnection:
@@ -620,7 +624,7 @@ def create_transaction(self):
 
 ### was using this to work around postgres deadlocks...
-# if dsn.startswith('postgres'): 
+# if dsn.startswith('postgres'):
 #     # postgres will put everything in a transaction, even after a commit,
 #     # and it seems that this makes it near impossible to drop tables after a test
 #     # (deadlock), so let's fix that...
diff --git a/fixture/test/test_loadable/test_storm_loadable.py b/tests/test_loadable_storm.py
similarity index 88%
rename from fixture/test/test_loadable/test_storm_loadable.py
rename to tests/test_loadable_storm.py
index 68ff40b..93d1a86 100644
--- a/fixture/test/test_loadable/test_storm_loadable.py
+++ b/tests/test_loadable_storm.py
@@ -3,35 +3,30 @@
 from nose.tools import eq_
 from nose.exc import SkipTest
 from fixture import StormFixture
-from fixture.test import env_supports
 from fixture import (
-    StormFixture, NamedDataStyle, PaddedNameStyle, CamelAndUndersStyle, 
+    StormFixture, NamedDataStyle, PaddedNameStyle, CamelAndUndersStyle,
     DataSet)
 from fixture.dataset import MergedSuperSet
-from fixture.test.test_loadable import *
 from fixture.examples.db.storm_examples import *
-from fixture.test import conf
+from . import conf, env_supports
+from .test_loadable import *
 
-
-
-
 from fixture.util import start_debug, stop_debug
 #start_debug("fixture.loadable")
 #start_debug("fixture.loadable.tree")
 #start_debug("fixture.loadable.storm")
-
 def setup():
     if not env_supports.storm:
         raise SkipTest
 
 class StormFixtureTest:
     fixture = StormFixture(
         style=( NamedDataStyle() + CamelAndUndersStyle()),
-        dsn=conf.LITE_DSN, env=globals(), 
+        dsn=conf.LITE_DSN, env=globals(),
         use_transaction=True,
         dataclass=MergedSuperSet )
-    
+
     def setUp(self, dsn=conf.LITE_DSN):
         """should load the dataset"""
         from storm.uri import URI
@@ -40,9 +35,9 @@ def setUp(self, dsn=conf.LITE_DSN):
         #debug(1)
         self.store = Store(create_database(URI(dsn)))
         self.fixture.store = self.store
-        
+
         setup_db(self.store)
-    
+
     def tearDown(self):
         """should unload the dataset."""
         store = self.store
@@ -53,18 +48,18 @@ def tearDown(self):
 class StormCategoryTest(StormFixtureTest):
     def assert_data_loaded(self, dataset):
         """assert that the dataset was loaded."""
-        eq_(self.store.get(Category, dataset.gray_stuff.id).name, 
+        eq_(self.store.get(Category, dataset.gray_stuff.id).name,
            dataset.gray_stuff.name)
-        eq_(self.store.get(Category, dataset.yellow_stuff.id).name, 
+        eq_(self.store.get(Category, dataset.yellow_stuff.id).name,
            dataset.yellow_stuff.name)
-    
+
     def assert_data_torndown(self):
         """assert that the dataset was torn down."""
         eq_(self.store.find(Category).count(), 0)
-    
+
 class TestStormCategory(
         HavingCategoryData, StormCategoryTest, LoadableTest):
-    pass 
+    pass
 
 class HavingCategoryDataStorable:
     """mixin that adds data to a LoadableTest."""
@@ -79,7 +74,7 @@ class yellow_stuff:
             id=2
             name='yellow'
         return [WhateverIWantToCallIt]
-    
+
 class TestStormCategoryStorable(
     HavingCategoryDataStorable, StormCategoryTest, LoadableTest):
     pass
@@ -88,29 +83,29 @@ class TestStormCategoryAsDataType(
     pass
 
 class TestStormPartialLoad(
-    StormFixtureTest, LoaderPartialRecoveryTest):    
+    StormFixtureTest, LoaderPartialRecoveryTest):
     def assert_partial_load_aborted(self):
         raise SkipTest("I don't think storm can support this feature")
-        
+
         # t = self.conn.transaction()
         # eq_(Category.select(connection=t).count(), 0)
-    
+
 class StormFixtureCascadeTest(StormFixtureTest):
     def assert_data_loaded(self, dataset):
         """assert that the dataset was loaded."""
         eq_(self.store.get(Offer,dataset.free_truck.id).name,
             dataset.free_truck.name)
-        
+
         eq_(self.store.get(Product,
             dataset.truck.id).name,
             dataset.truck.name)
-        
+
         eq_(self.store.get(Category,
             dataset.cars.id).name,
             dataset.cars.name)
         eq_(self.store.get(Category,
             dataset.free_stuff.id).name,
             dataset.free_stuff.name)
-    
+
     def assert_data_torndown(self):
         """assert that the dataset was torn down."""
         eq_(self.store.find(Category).count(), 0)
@@ -121,31 +116,31 @@ class StormFixtureCascadeTestWithHeavyDB(StormFixtureCascadeTest):
     def setUp(self):
         if not conf.HEAVY_DSN:
             raise SkipTest
-        
+
         StormFixtureCascadeTest.setUp(self, dsn=conf.HEAVY_DSN)
 
 class TestStormFixtureCascade(
-        HavingOfferProductData, StormFixtureCascadeTest, 
+        HavingOfferProductData, StormFixtureCascadeTest,
         LoadableTest):
     pass
 class TestStormFixtureCascadeWithHeavyDB(
-        HavingOfferProductData, StormFixtureCascadeTestWithHeavyDB, 
+        HavingOfferProductData, StormFixtureCascadeTestWithHeavyDB,
         LoadableTest):
     pass
 class TestStormFixtureCascadeAsType(
-        HavingOfferProductAsDataType, StormFixtureCascadeTest, 
+        HavingOfferProductAsDataType, StormFixtureCascadeTest,
         LoadableTest):
     pass
 class TestStormFixtureCascadeAsRef(
-        HavingReferencedOfferProduct, StormFixtureCascadeTest, 
+        HavingReferencedOfferProduct, StormFixtureCascadeTest,
         LoadableTest):
     pass
 class TestStormFixtureCascadeAsRefInherit(
-        HavingRefInheritedOfferProduct, StormFixtureCascadeTest, 
+        HavingRefInheritedOfferProduct, StormFixtureCascadeTest,
         LoadableTest):
     pass
 class TestStormFixtureCascadeAsRefInheritWithHeavyDB(
-        HavingRefInheritedOfferProduct, StormFixtureCascadeTestWithHeavyDB, 
+        HavingRefInheritedOfferProduct, StormFixtureCascadeTestWithHeavyDB,
         LoadableTest):
     pass
-    
+