diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..fa2c27b --- /dev/null +++ b/.travis.yml @@ -0,0 +1,14 @@ +language: python +python: + - "2.6" + - "2.7" +env: +# - DJANGO=1.3.4 +# - DJANGO=https://github.com/django/django/zipball/master +install: +# - pip install -q Django==$DJANGO --use-mirrors + - pip install -e . --use-mirrors +script: + - python setup_test_buildout.py + - ./bin/buildout + - ./bin/test-fixture diff --git a/fixture/command/generate/generate_sqlalchemy.py b/fixture/command/generate/generate_sqlalchemy.py index 8c703f3..fe359a0 100644 --- a/fixture/command/generate/generate_sqlalchemy.py +++ b/fixture/command/generate/generate_sqlalchemy.py @@ -1,16 +1,18 @@ -import sys, inspect +import sys +import inspect from fixture.command.generate import ( - DataHandler, register_handler, FixtureSet, NoData, UnsupportedHandler) + DataHandler, register_handler, FixtureSet, NoData, UnsupportedHandler, MisconfiguredHandler) from fixture import SQLAlchemyFixture try: import sqlalchemy except ImportError: sqlalchemy = False + class TableEnv(object): """a shared environment of sqlalchemy Table instances. - + can be initialized with python paths to objects or objects themselves """ def __init__(self, *objects): @@ -25,7 +27,7 @@ def __init__(self, *objects): try: if "." in modpath: cut = modpath.rfind(".") - names = [modpath[cut+1:]] + names = [modpath[cut + 1:]] parent = __import__( modpath[0:cut], globals(), locals(), names) module = getattr(parent, names[0]) @@ -42,10 +44,10 @@ def __init__(self, *objects): if module is None: module = inspect.getmodule(obj) self._find_objects(obj, module) - + def __contains__(self, key): return key in self.tablemap - + def __getitem__(self, table): try: return self.tablemap[table] @@ -56,10 +58,10 @@ def __getitem__(self, table): "(looked in: %s) You might need to add " "--env='path.to.module'?" % ( table, ", ".join([repr(p) for p in self.objects]))), tb - + def _find_objects(self, obj, module): from sqlalchemy.schema import Table - + # get dict key/vals or dir() through object ... if not hasattr(obj, 'items'): def getitems(): @@ -70,7 +72,16 @@ def getitems(): for name, o in getitems(): if isinstance(o, Table): self.add_table(o, name=name, module=module) - + if self._is_sa_mapped(o): + self.add_table(o.__table__, name=o.__name__, module=module) + + def _is_sa_mapped(self, cls): + try: + sqlalchemy.orm.util.class_mapper(cls) + return True + except Exception: + return False + def add_table(self, table_obj, name=None, module=None): if not name: # sqlalchemy 0.4 and ??
@@ -78,29 +89,29 @@ def add_table(self, table_obj, name=None, module=None): self.tablemap.setdefault(table_obj, {}) self.tablemap[table_obj]['name'] = name self.tablemap[table_obj]['module'] = module - + def get_real_table(self, table): return getattr(self[table]['module'], self[table]['name']) class SQLAlchemyHandler(DataHandler): """handles genration of fixture code from a sqlalchemy data source.""" - + loadable_fxt_class = SQLAlchemyFixture - + class RecordSetAdapter(object): - """adapts a sqlalchemy record set object for use in a + """adapts a sqlalchemy record set object for use in a SQLAlchemyFixtureSet.""" columns = None def __init__(self, obj): raise NotImplementedError("not a concrete implementation") - + def primary_key_from_instance(self, data): raise NotImplementedError - + def __init__(self, object_path, options, connection=None, **kw): from sqlalchemy import MetaData, create_engine from sqlalchemy.orm import sessionmaker, scoped_session - + self.engine = None self.connection = connection super(SQLAlchemyHandler, self).__init__(object_path, options, **kw) @@ -108,12 +119,12 @@ def __init__(self, object_path, options, connection=None, **kw): if not self.options.dsn: raise MisconfiguredHandler( "--dsn option is required by %s" % self.__class__) - + self.engine = create_engine(self.options.dsn) self.connection = self.engine self.meta = MetaData(bind=self.engine) ################################################ - if self.options.dsn.startswith('postgres'): + if self.options.dsn.startswith('postgres'): # postgres will put everything in a transaction, even after a commit, # and it seems that this makes it near impossible to drop tables after a test # (deadlock), so let's fix that... @@ -121,30 +132,30 @@ def __init__(self, object_path, options, connection=None, **kw): self.connection.raw_connection().set_isolation_level( psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) ################################################ - - Session = scoped_session(sessionmaker(autoflush=True, transactional=False, bind=self.engine)) + + Session = scoped_session(sessionmaker(autoflush=True, bind=self.engine)) self.session = Session() - + self.env = TableEnv(*[self.obj.__module__] + self.options.env) - + def add_fixture_set(self, fset): t = self.env[fset.obj.table] self.template.add_import("from %s import %s" % ( - t['module'].__name__, t['name'])) - + t['module'].__name__, t['name'])) + def begin(self, *a,**kw): DataHandler.begin(self, *a,**kw) - + def commit(self): pass - + def rollback(self): pass - + def find(self, idval): self.rs = [self.obj.get(idval)] return self.rs - + def findall(self, query=None): """gets record set for query.""" session = self.session @@ -155,11 +166,11 @@ def findall(self, query=None): if not self.rs.count(): raise NoData("no data for query \"%s\" on %s, handler=%s" % (query, self.obj, self.__class__)) return self.rs - + @staticmethod def recognizes(object_path, obj=None): """returns True if obj is not None. - + this method is just a starting point for sqlalchemy handlers. 
""" if not sqlalchemy: @@ -167,10 +178,10 @@ def recognizes(object_path, obj=None): if obj is None: return False return True - + def sets(self): """yields FixtureSet for each row in SQLObject.""" - + for row in self.rs: yield SQLAlchemyFixtureSet(row, self.obj, self.connection, self.env, adapter=self.RecordSetAdapter) @@ -178,42 +189,42 @@ def sets(self): class SQLAlchemyMappedClassBase(SQLAlchemyHandler): class RecordSetAdapter(SQLAlchemyHandler.RecordSetAdapter): def __init__(self, obj): - self.columns = obj.c - + self.columns = obj.__table__.columns + # could grab this from the Handler : - from sqlalchemy.orm.mapper import object_mapper + from sqlalchemy.orm import object_mapper self.mapper = object_mapper(obj()) - - if self.mapper.local_table: + + if self.mapper.local_table is not None: self.table = self.mapper.local_table elif self.mapper.select_table: self.table = self.mapper.select_table else: raise LookupError( - "not sure how to get a table from mapper %s" % + "not sure how to get a table from mapper %s" % self.mapper) - + self.id_attr = self.table.primary_key.columns.keys() - + def primary_key_from_instance(self, data): return self.mapper.primary_key_from_instance(data) - + def __init__(self, *args, **kw): super(SQLAlchemyMappedClassBase, self).__init__(*args, **kw) - + from sqlalchemy.orm.mapper import class_mapper self.mapper = class_mapper(self.obj) - - if self.mapper.local_table: + + if self.mapper.local_table is not None: self.table = self.mapper.local_table elif self.mapper.select_table: self.table = self.mapper.select_table else: raise LookupError( - "not sure how to get a table from mapper %s" % + "not sure how to get a table from mapper %s" % self.mapper) - - def find(self, idval): + + def find(self, idval): q = self.session.query(self.obj) primary_keys = self.table.primary_key.columns.keys() # I think this is 0.4 only try: @@ -226,10 +237,10 @@ def find(self, idval): table_cols = self.table.c for i, keyname in enumerate(primary_keys): q = q.filter(getattr(table_cols, keyname) == idval[i]) - + self.rs = q.all() return self.rs - + def findall(self, query=None): """gets record set for query.""" session = self.session @@ -241,36 +252,36 @@ def findall(self, query=None): raise NoData("no data for query \"%s\" on %s, handler=%s" % (query, self.obj, self.__class__)) return self.rs -## NOTE: the order that handlers are registered in is important for discovering +## NOTE: the order that handlers are registered in is important for discovering ## sqlalchemy types... 
-class SQLAlchemySessionMapperHandler(SQLAlchemyMappedClassBase): +class SQLAlchemySessionMapperHandler(SQLAlchemyMappedClassBase): """handles a scoped session mapper - + that is, one created with sqlalchemy.orm.scoped_session(sessionmaker(...)).mapper() - - """ - + + """ + @staticmethod def recognizes(object_path, obj=None): if not SQLAlchemyHandler.recognizes(object_path, obj=obj): return False - + if not SQLAlchemyMappedClassHandler.recognizes(object_path, obj=obj): return False - + # OK, so it is a mapped class - if (hasattr(obj, 'query') and - getattr(obj.query, '__module__', '').startswith('sqlalchemy')): - # sort of hoky but 0.5 proxies query and + if (hasattr(obj, 'query') and + getattr(obj.query, '__module__', '').startswith('sqlalchemy')): + # sort of hokey but 0.5 proxies query and # query.mapper so we can't check types return True - + return False - + register_handler(SQLAlchemySessionMapperHandler) -class SQLAlchemyTableHandler(SQLAlchemyHandler): +class SQLAlchemyTableHandler(SQLAlchemyHandler): class RecordSetAdapter(SQLAlchemyHandler.RecordSetAdapter): def __init__(self, obj): self.table = obj @@ -279,18 +290,18 @@ def __init__(self, obj): if len(keys) != 1: raise ValueError("unsupported primary key type %s" % keys) self.id_attr = keys[0].key - + def primary_key_from_instance(self, data): key_str = [] for k in self.table.primary_key: key_str.append(str(getattr(data, k.key))) return "_".join(key_str) - + @staticmethod def recognizes(object_path, obj=None): if not SQLAlchemyHandler.recognizes(object_path, obj=obj): return False - + from sqlalchemy.schema import Table if isinstance(obj, Table): raise NotImplementedError( @@ -298,18 +309,18 @@ def recognizes(object_path, obj=None): "Please use a mapped class or mapper object instead. Or, " "consider submitting a patch to support this.") return True - + return False - + register_handler(SQLAlchemyTableHandler) class SQLAlchemyMappedClassHandler(SQLAlchemyMappedClassBase): - + @staticmethod def recognizes(object_path, obj=None): if not SQLAlchemyHandler.recognizes(object_path, obj=obj): return False - + from sqlalchemy.orm import class_mapper try: class_mapper(obj) @@ -318,17 +329,16 @@ def recognizes(object_path, obj=None): return False else: return True - + return False - + register_handler(SQLAlchemyMappedClassHandler) class SQLAlchemyFixtureSet(FixtureSet): """a fixture set for a sqlalchemy record set.""" - + def __init__(self, data, obj, connection, env, adapter=None): - # print data, model FixtureSet.__init__(self, data) self.env = env self.connection = connection @@ -340,19 +350,23 @@ def __init__(self, data, obj, connection, env, adapter=None): # if self.obj.table not in self.env: # self.env.add_table(self.obj.table) self.primary_key = None - + self.data_dict = {} - for col in self.obj.columns: + if getattr(self.obj, 'mapper', False): + columns = self.obj.mapper.columns._data + else: + columns = dict((col.name, col) for col in self.obj.columns) + + for col_name, col in columns.iteritems(): sendkw = {} for fk in col.foreign_keys: sendkw['foreign_key'] = fk - - val = self.get_col_value(col.name, **sendkw) + val = self.get_col_value(col_name, **sendkw) self.data_dict[col.name] = val - + def attr_to_db_col(self, col): return col.name - + def get_col_value(self, colname, foreign_key=None): """transform column name into a value or a new set if it's a foreign key (recursion).
@@ -362,31 +376,28 @@ def get_col_value(self, colname, foreign_key=None): # this means that we are in a NULL column or foreign key # which could be perfectly legal. return None - + if foreign_key: - from sqlalchemy.ext.assignmapper import assign_mapper - from sqlalchemy.ext.sqlsoup import class_for_table - table = foreign_key.column.table stmt = table.select(getattr(table.c, foreign_key.column.key)==value) rs = self.connection.execute(stmt) - + # adapter is always table adapter here, since that's # how we obtain foreign keys... subset = SQLAlchemyFixtureSet( rs.fetchone(), table, self.connection, self.env, adapter=SQLAlchemyTableHandler.RecordSetAdapter) return subset - + return value - + def get_id_attr(self): return self.obj.id_attr - + def obj_id(self): return self.env[self.obj.table]['name'] - + def set_id(self): """returns id of this set (the primary key value).""" compid = self.obj.primary_key_from_instance(self.data) - return "_".join([str(i) for i in compid]) \ No newline at end of file + return "_".join([str(i) for i in compid]) diff --git a/fixture/command/generate/generate_sqlobject.py b/fixture/command/generate/generate_sqlobject.py index 87dd530..7268af8 100644 --- a/fixture/command/generate/generate_sqlobject.py +++ b/fixture/command/generate/generate_sqlobject.py @@ -4,21 +4,21 @@ from fixture.style import camel_to_under from fixture import SQLObjectFixture from fixture.command.generate import ( - DataHandler, FixtureSet, register_handler, code_str, + DataHandler, FixtureSet, register_handler, code_str, UnsupportedHandler, MisconfiguredHandler, NoData ) - + try: import sqlobject except ImportError: sqlobject = None class SQLObjectHandler(DataHandler): - + loadable_fxt_class = SQLObjectFixture - + def __init__(self, *a,**kw): DataHandler.__init__(self, *a,**kw) - from sqlobject import sqlhub, connectionForURI + from sqlobject import connectionForURI if self.options.dsn: self.connection = connectionForURI(self.options.dsn) else: @@ -28,28 +28,28 @@ def __init__(self, *a,**kw): raise NotImplementedError( "sqlobject is not using --env; perhaps we just need to import " "the envs so that findClass knows about its objects?") - + def add_fixture_set(self, fset): from sqlobject.classregistry import findClass so_class = fset.obj_id() kls = findClass(so_class) # this maybe isn't very flexible ... self.template.add_import("from %s import %s" % ( - kls.__module__, so_class)) - + kls.__module__, so_class)) + def find(self, idval): self.rs = [self.obj.get(idval)] - + def findall(self, query): - """gets record set for query.""" + """gets record set for query.""" self.rs = self.obj.select(query, connection=self.connection) if not self.rs.count(): raise NoData("no data for query \"%s\" on object %s" % ( query, self.obj)) - + def fxt_type(self): return 'SOFixture' - + @staticmethod def recognizes(object_path, obj=None): """returns True if obj is a SQLObject class. 
@@ -61,18 +61,18 @@ def recognizes(object_path, obj=None): from sqlobject.declarative import DeclarativeMeta if type(obj) is DeclarativeMeta and obj.__name__ not in ( 'SQLObject', 'sqlmeta', 'ManyToMany', 'OneToMany'): - return True - + return True + def sets(self): """yields FixtureSet for each row in SQLObject.""" for row in self.rs: yield SQLObjectFixtureSet(row, self.obj, connection=self.connection) - + register_handler(SQLObjectHandler) class SQLObjectFixtureSet(FixtureSet): """a fixture set for a SQLObject row.""" - + def __init__(self, data, model, connection=None): FixtureSet.__init__(self, data) self.connection = connection @@ -80,29 +80,29 @@ def __init__(self, data, model, connection=None): self.meta = model.sqlmeta self.foreign_key_class = {} self.primary_key = None - + self.understand_columns() - + # NOTE: primary keys are not included in columnList # so we need to find it ... - + cols = [self.meta.style.idForTable(self.meta.table)] cols.extend([self.attr_to_db_col(c) for c in self.meta.columnList]) - - # even though self.meta.idName tells us the real col name, when + + # even though self.meta.idName tells us the real col name, when # accessing object properties sqlobject wants you to say object.id, # for which it proxies the real id column name vals = [getattr(self.data, 'id')] vals.extend([self.get_col_value(c.name) for c in self.meta.columnList]) - + self.data_dict = dict(zip(cols, vals)) - + def attr_to_db_col(self, col): if col.dbName is not None: return col.dbName else: return self.meta.style.pythonAttrToDBColumn(col.name) - + def get_col_value(self, colname): """transform column name into a value or a new set if it's a foreign key (recursion). @@ -113,54 +113,54 @@ def get_col_value(self, colname): # this means that we are in a NULL foreign key # which could be perfectly legal. return None - + if self.foreign_key_class.has_key(colname): model = findClass(self.foreign_key_class[colname]) rs = model.get(value, connection=self.connection) return SQLObjectFixtureSet(rs, model, connection=self.connection) else: return value - + def get_id_attr(self): meta = self.meta id_attr = meta.style.idForTable(meta.table) return id_attr - + def mk_var_name(self): """returns a variable name for the instance of the fixture class. """ fxt_cls_name = self.obj_id() return "_".join([camel_to_under(n) for n in fxt_cls_name.split('_')]) - + def set_id(self): """returns id of this set (the primary key value).""" return getattr(self.data, 'id') # id is a magic property in sqlobject, see __init__ - + def understand_columns(self): """get an understanding of what columns are what, foreign keys, etc.""" from sqlobject.col import SOForeignKey - + for name,col in self.meta.columns.items(): if isinstance(col, SOForeignKey): self.foreign_key_class[col.name] = col.foreignKey - + #### I don't know if this is necessary anymore... # if sqlobject: # # OUCH! # # prepare for sqlobject monkey patch :( ... 
-# # this is so that foreign key lookups work right when -# # there are multiple schemas having the same table -# # (perfectly legal, but sqlobject was only finding the primary key +# # this is so that foreign key lookups work right when +# # there are multiple schemas having the same table +# # (perfectly legal, but sqlobject was only finding the primary key # # from the first schema) # import re # def columnsFromSchema(self, tableName, soClass): -# +# # keyQuery = """ # SELECT pg_catalog.pg_get_constraintdef(oid) as condef # FROM pg_catalog.pg_constraint r # WHERE r.conrelid = %s::regclass AND r.contype = 'f'""" -# +# # colQuery = """ # SELECT a.attname, # pg_catalog.format_type(a.atttypid, a.atttypmod), a.attnotnull, @@ -170,8 +170,8 @@ def understand_columns(self): # WHERE a.attrelid =%s::regclass # AND a.attnum > 0 AND NOT a.attisdropped # ORDER BY a.attnum""" -# -# # kumar: add limit 1 to get primary key for +# +# # kumar: add limit 1 to get primary key for # # first rel in schema search path # primaryKeyQuery = """ # SELECT pg_index.indisprimary, @@ -184,17 +184,17 @@ def understand_columns(self): # AND pg_index.indisprimary # LIMIT 1 # """ -# +# # keyData = self.queryAll(keyQuery % self.sqlrepr(tableName)) # keyRE = re.compile(r"\((.+)\) REFERENCES (.+)\(") # keymap = {} -# +# # for (condef,) in keyData: # match = keyRE.search(condef) # if match: # field, reftable = match.groups() # keymap[field] = reftable.capitalize() -# +# # primaryData = self.queryAll(primaryKeyQuery % self.sqlrepr(tableName)) # primaryRE = re.compile(r'CREATE .*? USING .* \((.+?)\)') # primaryKey = None @@ -207,7 +207,7 @@ def understand_columns(self): # if primaryKey.startswith('"'): # assert primaryKey.endswith('"') # primaryKey = primaryKey[1:-1] -# +# # colData = self.queryAll(colQuery % self.sqlrepr(tableName)) # results = [] # if self.unicodeCols: @@ -234,4 +234,4 @@ def understand_columns(self): # from warnings import warn # warn("monkey patching %s for multiple schema support" % ( # pgconnection.PostgresConnection.columnsFromSchema)) -# pgconnection.PostgresConnection.columnsFromSchema = columnsFromSchema \ No newline at end of file +# pgconnection.PostgresConnection.columnsFromSchema = columnsFromSchema diff --git a/fixture/command/generate/template.py b/fixture/command/generate/template.py index 03ea279..40cc06e 100644 --- a/fixture/command/generate/template.py +++ b/fixture/command/generate/template.py @@ -2,7 +2,6 @@ """templates that generate fixture modules.""" from fixture.command.generate import code_str -import pprint def _addto(val, list_): if val not in list_: @@ -13,19 +12,19 @@ def __init__(self): self.templates = [] self.lookup = {} self._default = None - + def __iter__(self): for tpl in self.templates: yield tpl - + def find(self, name): return self.templates[self.lookup[name]] - + def default(self): if self._default is None: raise LookupError("no default template has been set") return self.templates[self._default] - + def register(self, template, default=False): name = template.__class__.__name__ if name in self.lookup: @@ -34,10 +33,10 @@ def register(self, template, default=False): self.templates.append(template) id = len(self.templates)-1 self.lookup[name] = id - + if default: self._default = id - + templates = _TemplateRegistry() class Template(object): @@ -51,18 +50,18 @@ def __repr__(self): k, repr(v))) s[-1] = s[-1] + ")" return "\n".join(s) - + class tuple(tuple): def __repr__(self): s = ["("] for item in self: s.append(" %s," % repr(item)) return "\n".join(s) + ")" - + class 
DataDef: def __init__(self): self.data_header = [] # vars at top of data() method - + def add_header(self, hdr): if hdr not in self.data_header: self.data_header.append(hdr) @@ -71,32 +70,32 @@ def meta(self, fxt_class): """returns list of lines to add to the fixture class's meta. """ return ['pass'] - + class data(tuple): pass - + metabase = """ class metabase: pass""" - + fixture = None - + def __init__(self): self.import_header = [] # lines of import statements self.meta_header = [] # lines of attributes for inner meta class def __repr__(self): return "'%s'" % self.__class__.__name__ - + def add_import(self, _import): _addto(_import, self.import_header) - + def begin(self): pass - + def header(self, handler): return self.metabase - + def render(self, tpl): if self.fixture is None: raise NotImplementedError @@ -107,39 +106,39 @@ def is_template(obj): class fixture(Template): """renders DataSet objects for the fixture interface.""" - + class DataDef(Template.DataDef): def __init__(self, *a,**kw): Template.DataDef.__init__(self, *a,**kw) self.requires = [] - + def add_reference(self, fxt_class, fxt_var=None): _addto(code_str(fxt_class), self.requires) - + def fset_to_attr(self, fset, fxt_class): # do we need to check for MergedSuperSet ? # attribute needs key only return code_str("%s.%s.ref(%s)" % ( fxt_class, fset.mk_key(), repr(fset.get_id_attr()))) - + def meta(self, fxt_class): return "" # if len(self.requires): # return ["requires = %s" % str(tuple(self.requires))] # else: # return ["pass"] - + fixture = """ class %(fxt_class)s(DataSet): %(meta)s\ %(data)s""" - + metabase = "" - + def begin(self): self.add_import('import datetime') self.add_import("from fixture import DataSet") - + class data(object): def __init__(self, elements): self.elements = elements @@ -147,10 +146,10 @@ def __repr__(self): o = [] for class_, dict_ in self.elements: o.append(" class %s:" % class_) - for k,v in dict_.iteritems(): + for k, v in dict_.iteritems(): o.append(" %s = %s" % (k,repr(v))) return "\n".join(o) - + def header(self, handler): return "\n".join(Template.header(self, handler)) @@ -163,16 +162,16 @@ class DataDef(Template.DataDef): def add_reference(self, fxt_class, fxt_var=None): self.add_header('r = self.meta.req') self.add_header("r.%s = %s()" % (fxt_var, fxt_class)) - + def fset_to_attr(self, fset, fxt_class): return code_str("r.%s.%s.%s" % ( fset.mk_var_name(), fset.mk_key(), fset.get_id_attr())) - + def meta(self, fxt_class): """returns list of lines to add to the fixture class's meta. 
""" return ["so_class = %s" % fxt_class] - + fixture = """ class %(fxt_class)s(%(fxt_type)s): class meta(metabase): @@ -180,9 +179,9 @@ class meta(metabase): def data(self): %(data_header)s\ return %(data)s""" - + def begin(self): self.add_import('import datetime') self.add_import('from testtools.fixtures import SOFixture') - -templates.register(testtools()) \ No newline at end of file + +templates.register(testtools()) diff --git a/fixture/dataset/converter.py b/fixture/dataset/converter.py index 00eb734..2694038 100644 --- a/fixture/dataset/converter.py +++ b/fixture/dataset/converter.py @@ -3,7 +3,6 @@ import datetime import decimal -import types from fixture.dataset import DataSet, DataRow json = None try: @@ -15,36 +14,39 @@ except ImportError: pass + def _obj_items(obj): for name in dir(obj): if name.startswith('__'): continue yield name, getattr(obj, name) + def default_json_converter(obj): """converts obj to a value safe for JSON serialization.""" if isinstance(obj, (datetime.date, datetime.datetime, decimal.Decimal, float)): return str(obj) raise TypeError("%r is not JSON serializable" % (obj,)) + def dataset_to_json(dataset, fp=None, default=default_json_converter, wrap=None): - """Converts a :class:`DataSet ` class or + """Converts a :class:`DataSet ` class or instance to JSON (JavaScript Object Notation). - + See :ref:`using-dataset-to-json` for detailed usage. - + Keyword Arguments - - **fp** + + **fp** An optional file-like object (must implement ``fp.write()``). When - this is not None, the output is written to the fp object, otherwise - the output is returned - + this is not None, the output is written to the fp object, otherwise + the output is returned + **default** - A callable that takes one argument (an object) and returns output - suitable for JSON serialization. This will *only* be called if the + A callable that takes one argument (an object) and returns output + suitable for JSON serialization. This will *only* be called if the object cannot be serialized. For example:: - + >>> def encode_complex(obj): ... if isinstance(obj, complex): ... return [obj.real, obj.imag] @@ -54,27 +56,27 @@ def dataset_to_json(dataset, fp=None, default=default_json_converter, wrap=None) >>> class ComplexData(DataSet): ... class math_stuff: ... complex = 2 + 1j - ... + ... >>> dataset_to_json(ComplexData, default=encode_complex) '[{"complex": [2.0, 1.0]}]' - + **wrap** - A callable that takes one argument, the list of dictionaries before + A callable that takes one argument, the list of dictionaries before they are converted to JSON. For example:: - + >>> def wrap_in_dict(objects): ... return {'data': objects} - ... + ... >>> from fixture import DataSet >>> class ColorData(DataSet): ... class red: ... color = "red" - ... + ... >>> dataset_to_json(ColorData, wrap=wrap_in_dict) '{"data": [{"color": "red"}]}' - + Returns a JSON encoded string unless you specified the **fp** keyword - + """ assert json, ( "You must have the simplejson or json module installed. 
" @@ -107,4 +109,4 @@ def dataset_to_json(dataset, fp=None, default=default_json_converter, wrap=None) if __name__ == '__main__': import doctest - doctest.testmod() \ No newline at end of file + doctest.testmod() diff --git a/fixture/dataset/dataset.py b/fixture/dataset/dataset.py index 94b419d..75c5fd6 100644 --- a/fixture/dataset/dataset.py +++ b/fixture/dataset/dataset.py @@ -1,18 +1,20 @@ """Representations of Data -The main class you will work with is :class:`DataSet` but there are a +The main class you will work with is :class:`DataSet` but there are a few variations on it: :class:`SuperSet` and :class:`MergedSuperSet` """ -import sys, types +import sys +import types from fixture.util import ObjRegistry + class DataContainer(object): """ Contains data accessible by attribute and/or key. - + for all internally used attributes, use the inner class Meta. On instances, use self.meta instead. """ @@ -20,24 +22,24 @@ class DataContainer(object): class Meta: data = None keys = None - + def __init__(self, data=None, keys=None): lazy_meta(self) - if not data: + if not data: data = {} self.meta.data = data - if not keys: + if not keys: keys = [] self.meta.keys = keys - + def __contains__(self, name): """True if name is a known key""" return name in self.meta.keys - + def __getitem__(self, key): """self['foo'] returns self.meta.data['foo']""" return self.meta.data[key] - + def __getattribute__(self, name): """Attributes are always fetched first from self.meta.data[name] if possible""" # it is necessary to completely override __getattr__ @@ -48,7 +50,7 @@ def __getattribute__(self, name): return self.meta.data[name] except KeyError: raise AttributeError("%s has no attribute '%s'" % (self, name)) - + def __repr__(self): if hasattr(self, 'meta'): keys = self.meta.keys @@ -57,11 +59,11 @@ def __repr__(self): return "<%s at %s with keys %s>" % ( self.__class__.__name__, hex(id(self)), keys) - + def get(self, k, default=None): """self.meta.get(k, default)""" return self.meta.data.get(k, default) - + def _setdata(self, key, value): """Adds value to self.meta.data[key]""" if key not in self.meta.data: @@ -73,7 +75,7 @@ class RefValue(object): def __init__(self, ref, attr_name): self.attr_name = attr_name self.ref = ref - + def __repr__(self): return "<%s.%s for %s.%s.%s (%s)>" % ( Ref.__name__, self.__class__.__name__, @@ -82,16 +84,16 @@ def __repr__(self): def __get__(self, obj, type=None): """Returns the :class:`Ref` instance or a value stored in the dataset. - - The value returned depends on how this instance of :class:`RefValue` is - accessed. - - Read more about the ``__get__`` `descriptor`_ to understand how it works or read + + The value returned depends on how this instance of :class:`RefValue` is + accessed. + + Read more about the ``__get__`` `descriptor`_ to understand how it works or read some `in-depth descriptor examples`_. - + .. _descriptor: http://docs.python.org/ref/descriptors.html .. _in-depth descriptor examples: http://users.rcn.com/python/download/Descriptor.htm - + """ if obj is None: # self was assigned to a class object @@ -110,43 +112,43 @@ def __get__(self, obj, type=None): class Ref(object): """A reference to a row in a DataSet class. - + An instance of this class is accessible on the inner class (a row) in a :class:`DataSet` as :class:`Row.ref() ` - - This allows a DataSet to reference an id column of a "foreign key" DataSet + + This allows a DataSet to reference an id column of a "foreign key" DataSet before it exists. 
- - Ref is a Descriptor containing a deferred value to an attribute of a data - object (like an instance of a SQLAlchemy mapped class). It provides the - DataSet a way to cloak the fact that "id" is an attribute only populated - after said data object is saved to the database. In other words, the - DataSet doesn't know or care when it has been loaded or not. It thinks it - is referencing "id" all the same. The timing of when id is accessed is + + Ref is a Descriptor containing a deferred value to an attribute of a data + object (like an instance of a SQLAlchemy mapped class). It provides the + DataSet a way to cloak the fact that "id" is an attribute only populated + after said data object is saved to the database. In other words, the + DataSet doesn't know or care when it has been loaded or not. It thinks it + is referencing "id" all the same. The timing of when id is accessed is handled by the LoadableFixture. - + """ Value = RefValue - + def __init__(self, dataset_class, row): self.dataset_class = dataset_class self.dataset_obj = None self.row = row # i.e. the name of the row class... self.key = self.row.__name__ - + def __call__(self, ref_name): """Return a :class:`RefValue` instance for ref_name""" return self.Value(self, ref_name) - + def __repr__(self): return "<%s to %s.%s at %s>" % ( - self.__class__.__name__, self.dataset_class.__name__, + self.__class__.__name__, self.dataset_class.__name__, self.row.__name__, hex(id(self))) def is_row_class(attr): attr_type = type(attr) - return ((attr_type==types.ClassType or attr_type==type) and - attr.__name__ != 'Meta' and + return ((attr_type==types.ClassType or attr_type==type) and + attr.__name__ != 'Meta' and not issubclass(attr, DataContainer.Meta)) class DataType(type): @@ -154,75 +156,75 @@ class DataType(type): Meta class for creating :class:`DataSet` classes. """ default_primary_key = ['id'] - + def __init__(cls, name, bases, cls_attr): super(DataType, cls).__init__(name, bases, dict) - + if 'Meta' in cls_attr and hasattr(cls_attr['Meta'], 'primary_key'): cls_attr['_primary_key'] = cls_attr['Meta'].primary_key else: cls_attr['_primary_key'] = cls.default_primary_key - + # just like dir(), we should do this in alpha order : ## NOTE: dropping support for <2.4 here... for name in sorted(cls_attr.keys()): attr = cls_attr[name] if is_row_class(attr): cls.decorate_row(attr, name, bases, cls_attr) - + del cls_attr['_primary_key'] - + def decorate_row(cls, row, name, bases, cls_attr): """Each row (an inner class) assigned to a :class:`DataSet` will be customized after it is created. - + This is because it's easier to type:: - + class MyData(DataSet): class foo: col1 = "bz" col2 = "bx" - + ... than it is to type: - + class MyData(DataSet): class foo(Row): col1 = "bz" col2 = "bx" - + (Note the subclassing that would be required in inner classes without this behavior.) - + But more importantly, rows must be able to inherit from other rows, like:: - + class MyData(DataSet): class joe: first_name = "Joe" last_name = "Phelps" class joe_gibbs(joe): last_name = "Gibbs" - + Here is what happens to each inner class object as it is assigned to a :class:`DataSet`: - + 1. A ``Row._dataset`` property is added which is a reference to the :class:`DataSet` instance. 2. A ``Row.ref()`` property (instance of :class:`Ref`) is added - 3. Any database primary key inherited from another Row is de-referenced - since primary keys must be unique per row. See :ref:`Using Dataset ` for an + 3. 
Any database primary key inherited from another Row is de-referenced + since primary keys must be unique per row. See :ref:`Using Dataset ` for an example of referencing primary key values that may or may not exist yet. - - + + """ # store a backref to the container dataset row._dataset = cls - + # bind a ref method row.ref = Ref(cls, row) - + # fix inherited primary keys names_to_uninherit = [] for name in dir(row): if name in cls_attr['_primary_key']: if name not in row.__dict__: - # then this was an inherited value, so we need to nullify it - # without 1) disturbing the other inherited values and 2) + # then this was an inherited value, so we need to nullify it + # without 1) disturbing the other inherited values and 2) # disturbing the inherited class. is this nuts? names_to_uninherit.append(name) bases_to_replace = [] @@ -239,21 +241,21 @@ class joe_gibbs(joe): for base_c, base_pos in bases_to_replace: # this may not work if the row's base was a new-style class new_base = types.ClassType( - base_c.__name__, base_c.__bases__, + base_c.__name__, base_c.__bases__, dict([(k, getattr(base_c, k)) for k in dir(base_c) \ if not k.startswith('_') and \ k not in names_to_uninherit])) new_bases[base_pos] = new_base if new_bases: row.__bases__ = tuple(new_bases) - + def is_rowlike(candidate): """returns True if candidate is *like* a DataRow. - + Not to be confused with issubclass(candidate, DataRow). - - A regular or new-style class is row-like because DataSet objects allow any + + A regular or new-style class is row-like because DataSet objects allow any type of class to declare a row of data """ return hasattr(candidate, '_dataset') and type(candidate._dataset) in ( @@ -264,16 +266,16 @@ class DataRow(object): a DataSet row, values accessible by attibute or key. """ _reserved_attr = ('columns',) - + def __init__(self, dataset): object.__setattr__(self, '_dataset', dataset) # i.e. the name of the row class... object.__setattr__(self, '_key', self.__class__.__name__) - + def __getitem__(self, item): """self['foo'] works the same as self.foo""" return getattr(self, item) - + def __getattr__(self, name): """Undefined attributes are fetched from the actual data object stored for this row.""" # an undefined data attribute was referenced, @@ -282,13 +284,13 @@ def __getattr__(self, name): # created only after load if name.startswith('_'): return object.__getattribute__(self, name) - + obj = self._dataset.meta._stored_objects.get_object(self._key) return getattr(obj, name) - + @classmethod def columns(self): - """Classmethod that yields all attribute names (except reserved attributes) + """Classmethod that yields all attribute names (except reserved attributes) in alphabetical order """ for k in dir(self): @@ -302,19 +304,19 @@ def __init__(self, dataset): list.__init__(self) self.dataset = dataset self._ds_key_map = {} - + def get_object(self, key): """returns the object at this key. - + In this example... - + >>> class EventData(DataSet): ... class click: ... id=1 - - ...the key is "click." The object returned would be an adapter for + + ...the key is "click." 
The object returned would be an adapter for EventData, probably an Event object - + """ try: return self[ self._ds_key_map[key] ] @@ -322,7 +324,7 @@ def get_object(self, key): etype, val, tb = sys.exc_info() raise etype("row '%s' hasn't been loaded for %s (loaded: %s)" % ( key, self.dataset, self)), None, tb - + def store(self, key, obj): self.append(obj) pos = len(self)-1 @@ -333,31 +335,31 @@ def store(self, key, obj): class DataSetMeta(DataContainer.Meta): """ Configures a DataSet class. - - When defining a :class:`DataSet` class, declare this as ``DataSet.Meta`` to configure the ``DataSet``. + + When defining a :class:`DataSet` class, declare this as ``DataSet.Meta`` to configure the ``DataSet``. The following are acknowledged attributes: ``storable`` - an object that should be used to store this :class:`DataSet`. If omitted the - loader's :class:`Style ` object will look for a storable object in its env, + an object that should be used to store this :class:`DataSet`. If omitted the + loader's :class:`Style ` object will look for a storable object in its env, using ``storable_name`` ``storable_name`` - the name of the storable object that the loader should fetch from - its env to load this ``DataSet`` with. If omitted, the loader's style - object will try to guess the storable_name based on its env and the + the name of the storable object that the loader should fetch from + its env to load this ``DataSet`` with. If omitted, the loader's style + object will try to guess the storable_name based on its env and the name of the ``DataSet`` class ``primary_key`` - this is a list of names that should be acknowledged as primary keys + this is a list of names that should be acknowledged as primary keys in a ``DataSet``. The default is simply ``['id']``. - - Here is an example of using an inner ``Meta`` class to specify a custom + + Here is an example of using an inner ``Meta`` class to specify a custom storable object to be used when storing a :class:`DataSet`:: - + >>> class RecipeStore(object): ... '''pretend this knows how to save recipes''' - ... + ... >>> class Recipes(DataSet): ... class Meta: ... storable = RecipeStore @@ -366,8 +368,8 @@ class DataSetMeta(DataContainer.Meta): ... name = "Clam Chowder" ... class tomato_bisque(chowder): ... name = "Tomato Bisque" - ... - + ... + """ row = DataRow storable = None @@ -381,63 +383,63 @@ class DataSetMeta(DataContainer.Meta): class DataSet(DataContainer): """ Defines data to be loaded - - A loader will typically want to load a dataset into a - single storage medium. I.E. a table in a database. - + + A loader will typically want to load a dataset into a + single storage medium. I.E. a table in a database. + For a complete example see :ref:`Using DataSet `. - - Note that rows are always classes until the dataset instance has been + + Note that rows are always classes until the dataset instance has been loaded:: - + >>> class Flowers(DataSet): ... class violets: ... color = 'blue' ... class roses: ... color = 'red' - ... + ... >>> f = Flowers() >>> f.violets.color 'blue' - + See :class:`DataType` for info on how inner classes are constructed. - - Row values can also be inherited from other rows, just as normal inheritance - works in Python. See the ``primary_key`` :class:`Meta ` attribute for how + + Row values can also be inherited from other rows, just as normal inheritance + works in Python. See the ``primary_key`` :class:`Meta ` attribute for how inheritance works on primary keys:: - + >>> class Recipes(DataSet): ... class chowder: ... 
is_soup = True ... name = "Clam Chowder" ... class tomato_bisque(chowder): ... name = "Tomato Bisque" - ... + ... >>> r = Recipes() >>> r.chowder.is_soup True >>> r.tomato_bisque.is_soup True - + Keyword Arguments: - + default_refclass A :class:`SuperSet` to use if None has already been specified in ``Meta`` - + See :class:`DataSetMeta` for details about the special inner ``Meta`` class - + See :ref:`Using Dataset ` for more examples of usage. - + """ __metaclass__ = DataType _reserved_attr = DataContainer._reserved_attr + ('data', 'shared_instance') ref = None Meta = DataSetMeta - + def __init__(self, default_refclass=None, default_meta=None): DataContainer.__init__(self) - - # we want the convenience of not having to + + # we want the convenience of not having to # inherit DataSet.Meta. hmmm ... if not default_meta: default_meta = DataSet.Meta @@ -446,18 +448,18 @@ def __init__(self, default_refclass=None, default_meta=None): for name in dir(defaults): if not hasattr(self.meta, name): setattr(self.meta, name, getattr(defaults, name)) - + self.meta._stored_objects = DataSetStore(self) - # dereference from class ... + # dereference from class ... try: cl_attr = getattr(self.Meta, 'references') except AttributeError: cl_attr = [] setattr(self.meta, 'references', [c for c in cl_attr]) - + if not default_refclass: default_refclass = SuperSet - + def mkref(): clean_refs = [] for ds in iter(self.meta.references): @@ -466,48 +468,48 @@ def mkref(): continue clean_refs.append(ds) self.meta.references = clean_refs - + return default_refclass(*[ - ds.shared_instance(default_refclass=default_refclass) + ds.shared_instance(default_refclass=default_refclass) for ds in iter(self.meta.references) ]) - + # data def style classes, so they have refs before data is walked if len(self.meta.references) > 0: self.ref = mkref() - + for key, data in self.data(): if key in self: raise ValueError( "data() cannot redeclare key '%s' " "(this is already an attribute)" % key) - + if isinstance(data, dict): # make a new class object for the row data # so that a loaded dataset can instantiate this... data = type(key, (self.meta.row,), data) self._setdata(key, data) - + if not self.ref: # type style classes, since refs were discovered above self.ref = mkref() - + def __iter__(self): """yields keys of self.meta""" for key in self.meta.keys: yield (key, getattr(self, key)) - + def data(self): """returns iterable key/dict pairs. - + .. note:: If possible, use attribute-style definition of rows and columns instead (explained above) - - You would only need to override this if you have a DataSet that will - break unless it is ordered very specifically. Since class-style DataSet - objects are just classes with attributes, its rows will be loaded in - alphabetical order. The alternative is to define a DataSet as follows. + + You would only need to override this if you have a DataSet that will + break unless it is ordered very specifically. Since class-style DataSet + objects are just classes with attributes, its rows will be loaded in + alphabetical order. The alternative is to define a DataSet as follows. However, note that this is not as functional as a class-style DataSet:: - + >>> class Birds(DataSet): ... def data(self): ... return ( @@ -515,46 +517,46 @@ def data(self): ... name="Blue Jay")), ... ('crow', dict( ... name="Crow")),) - ... + ... 
>>> b = Birds() >>> b.blue_jay.name 'Blue Jay' >>> b.crow.name 'Crow' - + """ if self.meta._built: for k,v in self: yield (k,v) - + def public_dir(obj): for name in dir(obj): if name.startswith("_"): continue yield name - + def add_ref_from_rowlike(rowlike): if rowlike._dataset not in self.meta.references: self.meta.references.append(rowlike._dataset) - + empty = True for name in public_dir(self.__class__): val = getattr(self.__class__, name) if not is_row_class(val): continue - + empty = False key = name row_class = val row = {} - + for col_name in public_dir(row_class): col_val = getattr(row_class, col_name) - + if isinstance(col_val, Ref): # the .ref attribute continue - elif type(col_val) in (types.ListType, types.TupleType): + elif type(col_val) in (types.ListType, types.TupleType, set): for c in col_val: if is_rowlike(c): add_ref_from_rowlike(c) @@ -577,18 +579,18 @@ def add_ref_from_rowlike(rowlike): if ref.dataset_class not in self.meta.references: # store the reference: self.meta.references.append(ref.dataset_class) - + row[col_name] = col_val yield (key, row) - + if empty: raise ValueError("cannot create an empty DataSet") self.meta._built = True - + @classmethod def shared_instance(cls, **kw): """Returns or creates the singleton instance for this :class:`DataSet` class""" - # fixme: default_refclass might be in **kw. But only a loader can set a + # fixme: default_refclass might be in **kw. But only a loader can set a # refclass. hmm if cls in dataset_registry: dataset = dataset_registry[cls] @@ -600,91 +602,91 @@ def shared_instance(cls, **kw): class DataSetContainer(object): """ A ``DataSet`` of :class:`DataSet` classes - + yields :class:`DataSet` classes when itered over. """ class Meta: datasets = None dataset_keys = None - + def __init__(self): lazy_meta(self) self.meta.datasets = {} self.meta.dataset_keys = [] self.meta._cache = ObjRegistry() - + def __iter__(self): """yields dataset keys""" for k in self.meta.dataset_keys: yield self.meta.datasets[k] - + def _dataset_to_key(self, dataset): """Returns a key for dataset (the name of the DataSet subclass)""" return dataset.__class__.__name__ - + def _setdataset(self, dataset, key=None, isref=False): """sets a dataset in this container. - + Returns False if DataSet has already been added and does nothing. Otherwise adds the DataSet and returns True. """ # due to reference resolution we might get colliding data sets... if dataset in self.meta._cache: return False - + if key is None: key = self._dataset_to_key(dataset) if not isref: # refs are not yielded self.meta.dataset_keys.append(key) - + self.meta.datasets[key] = dataset - + self.meta._cache.register(dataset) return True class SuperSet(DataContainer, DataSetContainer): """ A set of :class:`DataSet` classes. - + each attribute / key is a :class:`DataSet` instance. - + For example:: - + >>> from fixture import DataSet >>> from fixture.dataset import SuperSet >>> class RecipeData(DataSet): ... class tomato_bisque: ... name = "Tomato Bisque" - ... + ... >>> class CookwareData(DataSet): ... class pots: ... type = "cast-iron" - ... + ... 
>>> s = SuperSet(RecipeData(), CookwareData()) - + Now each instance is available by class name:: - + >>> s.RecipeData.tomato_bisque.name 'Tomato Bisque' >>> s.CookwareData.pots.type 'cast-iron' - + """ class Meta(DataContainer.Meta, DataSetContainer.Meta): pass - + def __init__(self, *datasets): DataContainer.__init__(self) DataSetContainer.__init__(self) self._store_datasets(datasets) - + def _store_datasets(self, datasets): for d in datasets: k = self._dataset_to_key(d) self._setdata(k, d) self._setdataset(d, key=k) - + for ref_d in d.ref: k = self._dataset_to_key(ref_d) self._setdata(k, ref_d) @@ -693,31 +695,31 @@ def _store_datasets(self, datasets): class MergedSuperSet(SuperSet): """ A collection of :class:`DataSet` instances. - - all attributes of all :class:`DataSet` classes are merged together so that they are + + all attributes of all :class:`DataSet` classes are merged together so that they are accessible in this class. Duplicate attribute names are not allowed. - + For example:: - + >>> from fixture import DataSet >>> from fixture.dataset import MergedSuperSet >>> class RecipeData(DataSet): ... class tomato_bisque: ... name = "Tomato Bisque" - ... + ... >>> class CookwareData(DataSet): ... class pots: ... type = "cast-iron" - ... + ... >>> m = MergedSuperSet(RecipeData(), CookwareData()) - + Now the rows of each ``DataSet`` are available as if they were rows of the ``MergedSuperSet``:: - + >>> m.tomato_bisque.name 'Tomato Bisque' >>> m.pots.type 'cast-iron' - + """ class Meta(SuperSet.Meta): pass @@ -725,7 +727,7 @@ def __init__(self, *datasets): lazy_meta(self) self.meta.keys_to_datasets = {} SuperSet.__init__(self, *datasets) - + def _setdataset(self, dataset, key=None, isref=False): if SuperSet._setdataset(self, dataset, key=key, isref=isref): for k,row in dataset: @@ -734,20 +736,20 @@ def _setdataset(self, dataset, key=None, isref=False): "cannot add key '%s' for %s because it was " "already added by %s" % ( k, dataset, self.meta.keys_to_datasets[k])) - + # need an instance here, if it's a class... if not isinstance(row, DataRow): row = row(dataset) self._setdata(k, row) - self.meta.keys_to_datasets[k] = dataset - + self.meta.keys_to_datasets[k] = dataset + def _store_datasets(self, datasets): for dataset in datasets: self._setdataset(dataset) - + for d in dataset.ref: self._setdataset(d, isref=True) - + def lazy_meta(obj): if not hasattr(obj, 'meta'): @@ -756,4 +758,4 @@ def lazy_meta(obj): if __name__ == '__main__': import doctest doctest.testmod() - + diff --git a/fixture/loadable/loadable.py b/fixture/loadable/loadable.py index 5664888..3100142 100644 --- a/fixture/loadable/loadable.py +++ b/fixture/loadable/loadable.py @@ -267,6 +267,9 @@ def resolve_stored_object(candidate): if type(val) in (types.ListType, types.TupleType): # i.e. categories = [python, ruby] setattr(row, name, map(resolve_stored_object, val)) + elif type(val) is set: + # i.e. categories = {python, ruby} + setattr(row, name, set(resolve_stored_object(v) for v in val)) elif is_rowlike(val): # i.e. category = python setattr(row, name, resolved_rowlike(val))
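Note on the TableEnv change in generate_sqlalchemy.py: with the new _is_sa_mapped() check, modules passed via --env are scanned for mapped classes as well as bare Table objects, so declaratively mapped models are discovered automatically. A minimal sketch of a module this enables (the Book/authors names are hypothetical, not part of the patch):

    # hypothetical module passed as --env='myapp.models'
    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()
    metadata = MetaData()

    class Book(Base):
        # found via _is_sa_mapped(): class_mapper(Book) succeeds, so
        # TableEnv registers Book.__table__ under the name "Book"
        __tablename__ = 'books'
        id = Column(Integer, primary_key=True)
        title = Column(String(100))

    # still found via the existing isinstance(o, Table) branch
    authors = Table('authors', metadata,
                    Column('id', Integer, primary_key=True),
                    Column('name', String(100)))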
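Note on the set branches added to dataset.py and loadable.py: row references declared inside a DataSet were previously resolved only when held in lists and tuples; these hunks extend the same load-time resolution to sets. A sketch of what that permits, assuming made-up DataSet and column names:

    from fixture import DataSet

    class CategoryData(DataSet):
        class python:
            name = "python"
        class ruby:
            name = "ruby"

    class ArticleData(DataSet):
        class intro_post:
            title = "Getting Started"
            # lists/tuples of row references were already resolved at
            # load time; the new type checks resolve a set the same way
            categories = set([CategoryData.python, CategoryData.ruby])

Using set([...]) rather than a {...} literal keeps the sketch importable on Python 2.6, which the new Travis matrix still targets.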