improved sqlstore to not use eval/repr
James McGuinness committed Aug 12, 2013
1 parent 746f620 commit befac57
Showing 2 changed files with 13 additions and 11 deletions.
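In essence, the change drops the repr()/eval() round-trip that previously wrapped serialized state before it reached the database. A minimal Python 2 sketch of the before/after, using a stand-in pickle serializer (the names below are illustrative, not the actual tronstore API):

import pickle

class PickleSerializer(object):
    """Stand-in for tronstore's pickle serializer (illustrative only)."""
    name = 'pickle'

    @staticmethod
    def serialize(state_data):
        return pickle.dumps(state_data)

    @staticmethod
    def deserialize(blob):
        return pickle.loads(blob)

state_data = {'enabled': True, 'run_num': 3}

# Before: the pickled bytes were wrapped in repr() and stored as unicode text,
# so restoring meant eval()'ing that text back into a byte string first.
stored_text = unicode(repr(PickleSerializer.serialize(state_data)))
restored_old = PickleSerializer.deserialize(eval(str(stored_text)))

# After: the pickled bytes go into a binary column as-is and are handed
# straight back to the serializer on restore; no eval() involved.
stored_blob = PickleSerializer.serialize(state_data)
restored_new = PickleSerializer.deserialize(stored_blob)

assert restored_old == restored_new == state_data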
tests/serialize/runstate/tronstore/store_test.py (2 changes: 1 addition, 1 deletion)
@@ -82,7 +82,7 @@ def test_save(self):
         self.store.save(key, state_data, data_type)
 
         rows = self.store.engine.execute(self.store.service_table.select())
-        assert_equal(rows.fetchone(), (u'dotes', unicode(repr(self.store.serializer.serialize(state_data))), u'pickle'))
+        assert_equal(rows.fetchone(), ('dotes', self.store.serializer.serialize(state_data), 'pickle'))
 
     def test_restore_success(self):
         data_type = runstate.JOB_STATE
tron/serialize/runstate/tronstore/store.py (22 changes: 12 additions, 10 deletions)
@@ -60,26 +60,28 @@ def __init__(self, name, connection_details, serializer):
         self.serializer = serializer
         self.engine = sql.create_engine(connection_details,
             connect_args={'check_same_thread': False},
-            poolclass=sql.pool.StaticPool)
+            poolclass=sql.pool.StaticPool,
+            encoding='ascii')
+        self.engine.raw_connection().connection.text_factory = str
         self._setup_tables()
 
     def _setup_tables(self):
         self._metadata = sql.MetaData()
         self.job_state_table = sql.Table('job_state_data', self._metadata,
             sql.Column('key', sql.String(MAX_IDENTIFIER_LENGTH), primary_key=True),
-            sql.Column('state_data', sql.Text),
+            sql.Column('state_data', sql.LargeBinary),
             sql.Column('serial_method', sql.String(MAX_IDENTIFIER_LENGTH)))
         self.service_table = sql.Table('service_data', self._metadata,
             sql.Column('key', sql.String(MAX_IDENTIFIER_LENGTH), primary_key=True),
-            sql.Column('state_data', sql.Text),
+            sql.Column('state_data', sql.LargeBinary),
             sql.Column('serial_method', sql.String(MAX_IDENTIFIER_LENGTH)))
         self.job_run_table = sql.Table('job_run_data', self._metadata,
             sql.Column('key', sql.String(MAX_IDENTIFIER_LENGTH), primary_key=True),
-            sql.Column('state_data', sql.Text),
+            sql.Column('state_data', sql.LargeBinary),
             sql.Column('serial_method', sql.String(MAX_IDENTIFIER_LENGTH)))
         self.metadata_table = sql.Table('metadata_table', self._metadata,
             sql.Column('key', sql.String(MAX_IDENTIFIER_LENGTH), primary_key=True),
-            sql.Column('state_data', sql.Text),
+            sql.Column('state_data', sql.LargeBinary),
             sql.Column('serial_method', sql.String(MAX_IDENTIFIER_LENGTH)))
 
         self._metadata.create_all(self.engine)
@@ -107,7 +109,7 @@ def save(self, key, state_data, data_type):
         table = self._get_table(data_type)
         if table is None:
             return False
-        state_data = unicode(repr(self.serializer.serialize(state_data)))
+        state_data = self.serializer.serialize(state_data)
         serial_method = self.serializer.name
         update_result = conn.execute(
             table.update()
@@ -132,13 +134,13 @@ def restore(self, key, data_type):
         ).fetchone()
         if not result:
             return (False, None)
-        elif str(result[1]) != self.serializer.name:
+        elif result[1] != self.serializer.name:
             # TODO: If/when we have logging in the Tronstore process,
             # log here that the db_store_method was different
-            serializer = serialize_class_map[str(result[1])]
-            return (True, serializer.deserialize(eval(str(result[0]))))
+            serializer = serialize_class_map[result[1]]
+            return (True, serializer.deserialize(result[0]))
         else:
-            return (True, self.serializer.deserialize(eval(str(result[0]))))
+            return (True, self.serializer.deserialize(result[0]))
 
     def cleanup(self):
         if self._connection:
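For completeness, a self-contained sketch of the new storage pattern against an in-memory SQLite engine. The table and column shapes mirror the diff, but the engine URL and the 255-character key length (standing in for MAX_IDENTIFIER_LENGTH) are assumptions, not tronstore code; the encoding='ascii' and text_factory tweaks in the diff serve the same goal of keeping SQLite handing back byte strings rather than unicode.

import pickle
import sqlalchemy as sql

# In-memory SQLite engine, mirroring the StaticPool/check_same_thread setup.
engine = sql.create_engine('sqlite://',
    connect_args={'check_same_thread': False},
    poolclass=sql.pool.StaticPool)

metadata = sql.MetaData()
service_table = sql.Table('service_data', metadata,
    sql.Column('key', sql.String(255), primary_key=True),
    sql.Column('state_data', sql.LargeBinary),
    sql.Column('serial_method', sql.String(255)))
metadata.create_all(engine)

conn = engine.connect()
blob = pickle.dumps({'restarting': False})

# Store the pickled bytes directly; no repr()/unicode() wrapping.
conn.execute(service_table.insert().values(
    key='dotes', state_data=blob, serial_method='pickle'))

row = conn.execute(service_table.select().where(
    service_table.c.key == 'dotes')).fetchone()

# The bytes come back intact, so they can be unpickled without eval().
assert pickle.loads(str(row[1])) == {'restarting': False}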
