SQLAlchemy Autoloaded ORM Persistence
We are using SQLAlchemy's autoload feature to do the column mapping, to prevent hardcoding the columns in our code. The class definition in the question is cut off mid-line in the source:

class users(Base):
    __tablename__ = 'users'
    __table_args__ = { 'au
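The fragment breaks off inside __table_args__; given the topic of the question, it presumably enabled autoload. A minimal sketch of the pattern being described, for pre-1.4 SQLAlchemy (the connection string and the completion of __table_args__ are assumptions, not the asker's code):

from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('mysql://user:password@host/dbname')  # placeholder connection string
Base = declarative_base(bind=engine)

class users(Base):
    __tablename__ = 'users'
    __table_args__ = {'autoload': True}  # assumed completion: reflect columns from the live database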
Solution 1:
What I am doing now is to pickle the metadata after running the reflection through a database connection (MySQL) and once a pickle is available use that pickled metadata to reflect on the schema with the metadata bound to an SQLite engine.
import pickle

from sqlalchemy import MetaData, Table, create_engine
from sqlalchemy.ext.declarative import declarative_base

cachefile = 'orm.p'
dbfile = 'database'

engine_dev = create_engine('mysql://...', echo=True)  # real (MySQL) connection string elided
engine_meta = create_engine('sqlite:///%s' % dbfile, echo=True)

Base = declarative_base()
Base.metadata.bind = engine_dev
metadata = MetaData(bind=engine_dev)

# load from pickle
try:
    with open(cachefile, 'rb') as cache:
        metadata2 = pickle.load(cache)
    metadata2.bind = engine_meta

    class Users(Base):
        __table__ = Table('users', metadata2, autoload=True)

    print "ORM loaded from pickle"

except (IOError, pickle.UnpicklingError):
    # if no usable pickle, reflect through the database connection
    class Users(Base):
        __table__ = Table('users', metadata, autoload=True)

    print "ORM through database autoload"

    # create metapickle
    metadata.create_all()
    with open(cachefile, 'wb') as cache:
        pickle.dump(metadata, cache)
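This works because the unpickled metadata2 already holds the reflected Table objects, so the Table('users', metadata2, autoload=True) lookup is satisfied from the pickle and the throwaway SQLite engine is never actually queried. Once Users is mapped, querying goes through the real engine as usual; a brief sketch (the session setup below is an illustration, not part of the original post):

from sqlalchemy.orm import sessionmaker

Session = sessionmaker(bind=engine_dev)  # queries still run against the MySQL engine
session = Session()
first_user = session.query(Users).first()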
Any comments on whether this is alright (it works), or is there something I can improve?
Solution 2:
My solution isn't terribly different from @user1572502's, but might be useful. I place my cached metadata files in ~/.sqlalchemy_cache, but they can be anywhere.
import os
import pickle

from sqlalchemy.ext.declarative import declarative_base

# assuming something like this:
Base = declarative_base(bind=engine)

metadata_pickle_filename = "mydb_metadata_cache.pickle"

# ------------------------------------------
# Load the cached metadata if it's available
# ------------------------------------------
# NOTE: delete the cached file if the database schema changes!!
cache_path = os.path.join(os.path.expanduser("~"), ".sqlalchemy_cache")
cached_metadata = None
if os.path.exists(cache_path):
    try:
        with open(os.path.join(cache_path, metadata_pickle_filename), 'rb') as cache_file:
            cached_metadata = pickle.load(cache_file)
    except IOError:
        # cache file not found - no problem
        pass

# -----------------------------
# Define database table classes
# -----------------------------
class MyTable(Base):
    if cached_metadata:
        __table__ = cached_metadata.tables['my_schema.my_table']
    else:
        __tablename__ = 'my_table'
        __table_args__ = {'autoload': True, 'schema': 'my_schema'}

# ... continue for any other tables ...

# ----------------------------------------
# If no cached metadata was found, save it
# ----------------------------------------
if cached_metadata is None:
    # cache the metadata for future loading
    # - MUST DELETE IF THE DATABASE SCHEMA HAS CHANGED
    try:
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)
        # make sure to open in binary mode - we're writing bytes, not str
        with open(os.path.join(cache_path, metadata_pickle_filename), 'wb') as cache_file:
            pickle.dump(Base.metadata, cache_file)
    except (IOError, OSError):
        # couldn't write the file for some reason
        pass
Important Note!! If the database schema changes, you must delete the cached file to force the code to autoload again and build a new cache. If you don't, the changes will not be reflected in the code. It's an easy thing to forget.
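If forgetting is a worry, the deletion can be automated; a hedged sketch (the age threshold and the helper itself are additions, not part of the answer) that drops a stale cache before the loading step above:

import os
import time

# hypothetical helper: remove the cached metadata if it is older than a day,
# forcing the next run to reflect from the live database and rebuild the cache
cache_file = os.path.join(cache_path, metadata_pickle_filename)
cache_max_age = 24 * 60 * 60  # seconds
if os.path.exists(cache_file) and time.time() - os.path.getmtime(cache_file) > cache_max_age:
    os.remove(cache_file)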