alembic: Remove batch operations (and sqlite support)

Because SQLite doesn't support the full range of ALTER TABLE operations,
alembic scripts need batch operations to modify existing tables.  However,
batch support wasn't added to alembic until version 0.7.0, which some
distributions haven't packaged yet.  Therefore, the batch operations
introduced in commit 86d6e44cc (review 2319) have been reverted and SQLite
is unsupported again, for now anyway.
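
For illustration, the two styles differ roughly as follows (a sketch only,
reusing the ps_globals/regcontext column from one of the scripts touched
here; in the real scripts this runs inside upgrade()/downgrade()):

    from alembic import op

    # Batch style (reverted by this commit): on SQLite, alembic emulates the
    # ALTER by creating a replacement table and copying the data over, but
    # batch_alter_table() only exists in alembic 0.7.0 and later.
    with op.batch_alter_table('ps_globals') as batch_op:
        batch_op.drop_column('regcontext')

    # Direct style (restored by this commit): a plain ALTER TABLE, which is
    # fine on MySQL/MariaDB and PostgreSQL.
    op.drop_column('ps_globals', 'regcontext')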

Tested the full upgrade and downgrade on MySQL/MariaDB and PostgreSQL.

ASTERISK-25890 #close
Reported-by: Harley Peters

Change-Id: I82eba5456736320256f6775f5b0b40133f4d1c80
Branch: changes/47/2547/2
Author: George Joseph
Parent: 3e5672d843
Commit: 751d7a5a49
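
Note on the first hunk below: render_as_batch is an autogenerate option, so
removing it only changes how future migration scripts are rendered (with it
set, newly generated scripts use batch_alter_table() blocks so they can run
on SQLite).  A sketch of the configuration being dropped from
run_migrations_online() in env.py:

    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        render_as_batch=True   # needs alembic 0.7.0+; removed by this commit
    )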

@@ -58,8 +58,7 @@ def run_migrations_online():
     connection = engine.connect()
     context.configure(
         connection=connection,
-        target_metadata=target_metadata,
-        render_as_batch=True
+        target_metadata=target_metadata
     )
     try:

@@ -45,10 +45,7 @@ def upgrade():
     context = op.get_context()
     # Upgrading to this revision WILL clear your directmedia values.
-    if context.bind.dialect.name == 'sqlite':
-        with op.batch_alter_table('sippeers') as batch_op:
-            batch_op.alter_column('directmedia', type_=new_type)
-    elif context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name != 'postgresql':
         op.alter_column('sippeers', 'directmedia',
                         type_=new_type,
                         existing_type=old_type)
@@ -69,10 +66,7 @@ def downgrade():
     op.execute(tcr.update().where(tcr.c.directmedia==u'outgoing')
               .values(directmedia=None))
-    if context.bind.dialect.name == 'sqlite':
-        with op.batch_alter_table('sippeers') as batch_op:
-            batch_op.alter_column('directmedia', type_=old_type)
-    elif context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name != 'postgresql':
         op.alter_column('sippeers', 'directmedia',
                         type_=old_type,
                         existing_type=new_type)

@@ -17,5 +17,4 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('regcontext', sa.String(80)))
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('regcontext')
+    op.drop_column('ps_globals', 'regcontext')

@@ -19,5 +19,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('default_from_user')
+    op.drop_column('ps_globals', 'default_from_user')

@@ -33,11 +33,9 @@ import sqlalchemy as sa
 def upgrade():
-    with op.batch_alter_table('sippeers') as batch_op:
-        batch_op.alter_column('useragent', type_=sa.String(255))
+    op.alter_column('sippeers', 'useragent', type_=sa.String(255))
 def downgrade():
-    with op.batch_alter_table('sippeers') as batch_op:
-        batch_op.alter_column('useragent', type_=sa.String(20))
+    op.alter_column('sippeers', 'useragent', type_=sa.String(20))

@@ -19,5 +19,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('keep_alive_interval')
+    op.drop_column('ps_globals', 'keep_alive_interval')

@@ -22,10 +22,7 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('default_voicemail_extension')
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.drop_column('voicemail_extension')
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('voicemail_extension')
-        batch_op.drop_column('mwi_subscribe_replaces_unsolicited')
+    op.drop_column('ps_globals', 'default_voicemail_extension')
+    op.drop_column('ps_aors', 'voicemail_extension')
+    op.drop_column('ps_endpoints', 'voicemail_extension')
+    op.drop_column('ps_endpoints', 'mwi_subscribe_replaces_unsolicited')

@@ -17,5 +17,4 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('accountcode', sa.String(20)))
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('accountcode')
+    op.drop_column('ps_endpoints', 'accountcode')

@@ -18,5 +18,4 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('debug', sa.String(40)))
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('debug')
+    op.drop_column('ps_globals', 'debug')

@@ -45,5 +45,4 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('rpid_immediate', yesno_values))
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('rpid_immediate')
+    op.drop_column('ps_endpoints', 'rpid_immediate')

@@ -28,5 +28,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('bind_rtp_to_media_address')
+    op.drop_column('ps_endpoints', 'bind_rtp_to_media_address')

@@ -20,6 +20,5 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('rtp_timeout')
-        batch_op.drop_column('rtp_timeout_hold')
+    op.drop_column('ps_endpoints', 'rtp_timeout')
+    op.drop_column('ps_endpoints', 'rtp_timeout_hold')

@@ -27,5 +27,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('g726_non_standard')
+    op.drop_column('ps_endpoints', 'g726_non_standard')

@@ -19,5 +19,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_registrations') as batch_op:
-        batch_op.drop_column('fatal_retry_interval')
+    op.drop_column('ps_registrations', 'fatal_retry_interval')

@@ -15,10 +15,8 @@ import sqlalchemy as sa
 def upgrade():
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.alter_column('contact', type_=sa.String(255))
+    op.alter_column('ps_aors', 'contact', type_=sa.String(255))
 def downgrade():
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.alter_column('contact', type_=sa.String(40))
+    op.alter_column('ps_aors', 'contact', type_=sa.String(40))

@@ -120,17 +120,15 @@ def upgrade():
     op.create_index('ps_registrations_id', 'ps_registrations', ['id'])
     ########################## add columns ###########################
-    with op.batch_alter_table('ps_endpoints') as batch_op:
     # new columns for endpoints
-        batch_op.add_column(sa.Column('media_address', sa.String(40)))
-        batch_op.add_column(sa.Column('redirect_method',
-                                      pjsip_redirect_method_values))
-        batch_op.add_column(sa.Column('set_var', sa.Text()))
+    op.add_column('ps_endpoints', sa.Column('media_address', sa.String(40)))
+    op.add_column('ps_endpoints', sa.Column('redirect_method',
+                                            pjsip_redirect_method_values))
+    op.add_column('ps_endpoints', sa.Column('set_var', sa.Text()))
     # rename mwi_fromuser to mwi_from_user
-        batch_op.alter_column('mwi_fromuser',
-                              new_column_name='mwi_from_user',
-                              existing_type=sa.String(40))
+    op.alter_column('ps_endpoints', 'mwi_fromuser',
+                    new_column_name='mwi_from_user', existing_type=sa.String(40))
     # new columns for contacts
     op.add_column('ps_contacts', sa.Column('outbound_proxy', sa.String(40)))
@@ -144,23 +142,19 @@ def upgrade():
 def downgrade():
     ########################## drop columns ##########################
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.drop_column('support_path')
-        batch_op.drop_column('outbound_proxy')
-        batch_op.drop_column('maximum_expiration')
+    op.drop_column('ps_aors', 'support_path')
+    op.drop_column('ps_aors', 'outbound_proxy')
+    op.drop_column('ps_aors', 'maximum_expiration')
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.drop_column('path')
-        batch_op.drop_column('outbound_proxy')
+    op.drop_column('ps_contacts', 'path')
+    op.drop_column('ps_contacts', 'outbound_proxy')
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.alter_column('mwi_from_user',
-                              new_column_name='mwi_fromuser',
-                              existing_type=sa.String(40))
+    op.alter_column('ps_endpoints', 'mwi_from_user',
+                    new_column_name='mwi_fromuser', existing_type=sa.String(40))
-        batch_op.drop_column('set_var')
-        batch_op.drop_column('redirect_method')
-        batch_op.drop_column('media_address')
+    op.drop_column('ps_endpoints', 'set_var')
+    op.drop_column('ps_endpoints', 'redirect_method')
+    op.drop_column('ps_endpoints', 'media_address')
     ########################## drop tables ###########################

@@ -24,10 +24,7 @@ def upgrade():
     context = op.get_context()
     # Upgrading to this revision WILL clear your directmedia values.
-    if context.bind.dialect.name == 'sqlite':
-        with op.batch_alter_table('ps_endpoints') as batch_op:
-            batch_op.alter_column('dtmf_mode', type_=new_type)
-    elif context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name != 'postgresql':
         op.alter_column('ps_endpoints', 'dtmf_mode',
                         type_=new_type,
                         existing_type=old_type)
@@ -45,10 +42,7 @@ def upgrade():
 def downgrade():
     context = op.get_context()
-    if context.bind.dialect.name == 'sqlite':
-        with op.batch_alter_table('ps_endpoints') as batch_op:
-            batch_op.alter_column('dtmf_mode', type_=old_type)
-    elif context.bind.dialect.name != 'postgresql':
+    if context.bind.dialect.name != 'postgresql':
         op.alter_column('ps_endpoints', 'dtmf_mode',
                         type_=old_type,
                         existing_type=new_type)

@@ -27,5 +27,4 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('user_eq_phone', yesno_values))
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('user_eq_phone')
+    op.drop_column('ps_endpoints', 'user_eq_phone')

@@ -20,7 +20,5 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.drop_column('user_agent')
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('message_context')
+    op.drop_column('ps_contacts', 'user_agent')
+    op.drop_column('ps_endpoints', 'message_context')

@@ -22,5 +22,4 @@ def upgrade():
     op.add_column('ps_transports', sa.Column('allow_reload', yesno_values))
 def downgrade():
-    with op.batch_alter_table('ps_transports') as batch_op:
-        batch_op.drop_column('allow_reload')
+    op.drop_column('ps_transports', 'allow_reload')

@@ -15,13 +15,9 @@ import sqlalchemy as sa
 def upgrade():
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.alter_column('qualify_timeout', type_=sa.Float)
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.alter_column('qualify_timeout', type_=sa.Float)
+    op.alter_column('ps_aors', 'qualify_timeout', type_=sa.Float)
+    op.alter_column('ps_contacts', 'qualify_timeout', type_=sa.Float)
 def downgrade():
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.alter_column('qualify_timeout', type_=sa.Integer)
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.alter_column('qualify_timeout', type_=sa.Integer)
+    op.alter_column('ps_aors', 'qualify_timeout', type_=sa.Integer)
+    op.alter_column('ps_contacts', 'qualify_timeout', type_=sa.Integer)

@@ -18,5 +18,4 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('endpoint_identifier_order', sa.String(40)))
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('endpoint_identifier_order')
+    op.drop_column('ps_globals', 'endpoint_identifier_order')

@@ -18,7 +18,5 @@ def upgrade():
     op.add_column('ps_contacts', sa.Column('qualify_timeout', sa.Integer))
 def downgrade():
-    with op.batch_alter_table('ps_aors') as batch_op:
-        batch_op.drop_column('qualify_timeout')
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.drop_column('qualify_timeout')
+    op.drop_column('ps_aors', 'qualify_timeout')
+    op.drop_column('ps_contacts', 'qualify_timeout')

@@ -19,5 +19,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('rtp_keepalive')
+    op.drop_column('ps_endpoints', 'rtp_keepalive')

@@ -19,43 +19,35 @@ YESNO_NAME = 'yesno_values'
 YESNO_VALUES = ['yes', 'no']
 def upgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.alter_column('tos_audio',
-                              type_=sa.String(10))
-        batch_op.alter_column('tos_video',
-                              type_=sa.String(10))
-        batch_op.drop_column('cos_audio')
-        batch_op.drop_column('cos_video')
-        batch_op.add_column(sa.Column('cos_audio', sa.Integer))
-        batch_op.add_column(sa.Column('cos_video', sa.Integer))
-    with op.batch_alter_table('ps_transports') as batch_op:
-        batch_op.alter_column('tos',
-                              type_=sa.String(10))
+    op.alter_column('ps_endpoints', 'tos_audio', type_=sa.String(10))
+    op.alter_column('ps_endpoints', 'tos_video', type_=sa.String(10))
+    op.drop_column('ps_endpoints', 'cos_audio')
+    op.drop_column('ps_endpoints', 'cos_video')
+    op.add_column('ps_endpoints', sa.Column('cos_audio', sa.Integer))
+    op.add_column('ps_endpoints', sa.Column('cos_video', sa.Integer))
-        # Can't cast YENO_VALUES to Integers, so dropping and adding is required
-        batch_op.drop_column('cos')
+    op.alter_column('ps_transports', 'tos', type_=sa.String(10))
-        batch_op.add_column(sa.Column('cos', sa.Integer))
+    # Can't cast YENO_VALUES to Integers, so dropping and adding is required
+    op.drop_column('ps_transports', 'cos')
+    op.add_column('ps_transports', sa.Column('cos', sa.Integer))
 def downgrade():
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
     # Can't cast string to YESNO_VALUES, so dropping and adding is required
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('tos_audio')
-        batch_op.drop_column('tos_video')
-        batch_op.add_column(sa.Column('tos_audio', yesno_values))
-        batch_op.add_column(sa.Column('tos_video', yesno_values))
-        batch_op.drop_column('cos_audio')
-        batch_op.drop_column('cos_video')
-        batch_op.add_column(sa.Column('cos_audio', yesno_values))
-        batch_op.add_column(sa.Column('cos_video', yesno_values))
-    with op.batch_alter_table('ps_transports') as batch_op:
-        batch_op.drop_column('tos')
-        batch_op.add_column(sa.Column('tos', yesno_values))
+    op.drop_column('ps_endpoints', 'tos_audio')
+    op.drop_column('ps_endpoints', 'tos_video')
+    op.add_column('ps_endpoints', sa.Column('tos_audio', yesno_values))
+    op.add_column('ps_endpoints', sa.Column('tos_video', yesno_values))
+    op.drop_column('ps_endpoints', 'cos_audio')
+    op.drop_column('ps_endpoints', 'cos_video')
+    op.add_column('ps_endpoints', sa.Column('cos_audio', yesno_values))
+    op.add_column('ps_endpoints', sa.Column('cos_video', yesno_values))
+    op.drop_column('ps_transports', 'tos')
+    op.add_column('ps_transports', sa.Column('tos', yesno_values))
     # Can't cast integers to YESNO_VALUES, so dropping and adding is required
-        batch_op.drop_column('cos')
-        batch_op.add_column(sa.Column('cos', yesno_values))
+    op.drop_column('ps_transports', 'cos')
+    op.add_column('ps_transports', sa.Column('cos', yesno_values))

@@ -36,28 +36,19 @@ def upgrade():
     context = op.get_context()
     # Was unable to find a way to use op.alter_column() to add the unique
    # index property.
-    if context.bind.dialect.name == 'sqlite':
-        with op.batch_alter_table('queue_members') as batch_op:
-            batch_op.create_primary_key('queue_members_pj', columns='uniqueid')
-    else:
-        op.drop_column('queue_members', 'uniqueid')
-        op.add_column(
-            'queue_members',
-            sa.Column(
-                name='uniqueid', type_=sa.Integer, nullable=False,
-                unique=True))
+    op.drop_column('queue_members', 'uniqueid')
+    op.add_column('queue_members', sa.Column(name='uniqueid', type_=sa.Integer,
+                                             nullable=False, unique=True))
     # The postgres backend does not like the autoincrement needed for
     # mysql here. It is just the backend that is giving a warning and
     # not the database itself.
-        op.alter_column(
-            table_name='queue_members', column_name='uniqueid',
-            existing_type=sa.Integer, existing_nullable=False,
-            autoincrement=True)
+    op.alter_column(table_name='queue_members', column_name='uniqueid',
+                    existing_type=sa.Integer, existing_nullable=False,
+                    autoincrement=True)
 def downgrade():
     # Was unable to find a way to use op.alter_column() to remove the
     # unique index property.
-    with op.batch_alter_table('queue_members') as batch_op:
-        batch_op.drop_column('uniqueid')
-        batch_op.add_column(sa.Column(name='uniqueid', type_=sa.String(80), nullable=False))
+    op.drop_column('queue_members', 'uniqueid')
+    op.add_column('queue_members', sa.Column(name='uniqueid', type_=sa.String(80), nullable=False))

@@ -28,6 +28,5 @@ def upgrade():
     op.add_column('ps_endpoints', sa.Column('media_use_received_transport', yesno_values))
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('force_avp')
-        batch_op.drop_column('media_use_received_transport')
+    op.drop_column('ps_endpoints', 'force_avp')
+    op.drop_column('ps_endpoints', 'media_use_received_transport')

@@ -17,5 +17,4 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('contact_expiration_check_interval', sa.Integer))
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('contact_expiration_check_interval')
+    op.drop_column('ps_globals', 'contact_expiration_check_interval')

@@ -19,13 +19,11 @@ YESNO_VALUES = ['yes', 'no']
 def upgrade():
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
-    with op.batch_alter_table('ps_transports') as batch_op:
-        batch_op.alter_column('verifiy_server', type_=yesno_values,
+    op.alter_column('ps_transports', 'verifiy_server', type_=yesno_values,
+                    new_column_name='verify_server')
 def downgrade():
     yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
-    with op.batch_alter_table('ps_transports') as batch_op:
-        batch_op.alter_column('verify_server', type_=yesno_values,
+    op.alter_column('ps_transports', 'verify_server', type_=yesno_values,
+                    new_column_name='verifiy_server')

@@ -28,5 +28,4 @@ def upgrade():
 def downgrade():
-    with op.batch_alter_table('ps_endpoints') as batch_op:
-        batch_op.drop_column('media_encryption_optimistic')
+    op.drop_column('ps_endpoints', 'media_encryption_optimistic')

@@ -17,5 +17,4 @@ def upgrade():
     op.add_column('ps_globals', sa.Column('max_initial_qualify_time', sa.Integer))
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.drop_column('max_initial_qualify_time')
+    op.drop_column('ps_globals', 'max_initial_qualify_time')

@@ -29,8 +29,6 @@ def upgrade():
     op.add_column('ps_registrations', sa.Column('endpoint', sa.String(40)))
 def downgrade():
-    with op.batch_alter_table('ps_systems') as batch_op:
-        batch_op.drop_column('disable_tcp_switch')
-    with op.batch_alter_table('ps_registrations') as batch_op:
-        batch_op.drop_column('line')
-        batch_op.drop_column('endpoint')
+    op.drop_column('ps_systems', 'disable_tcp_switch')
+    op.drop_column('ps_registrations', 'line')
+    op.drop_column('ps_registrations', 'endpoint')

@@ -15,28 +15,22 @@ import sqlalchemy as sa
 def upgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.alter_column('user_agent', type_=sa.String(255))
+    op.alter_column('ps_globals', 'user_agent', type_=sa.String(255))
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.alter_column('id', type_=sa.String(255))
-        batch_op.alter_column('uri', type_=sa.String(255))
-        batch_op.alter_column('user_agent', type_=sa.String(255))
+    op.alter_column('ps_contacts', 'id', type_=sa.String(255))
+    op.alter_column('ps_contacts', 'uri', type_=sa.String(255))
+    op.alter_column('ps_contacts', 'user_agent', type_=sa.String(255))
-    with op.batch_alter_table('ps_registrations') as batch_op:
-        batch_op.alter_column('client_uri', type_=sa.String(255))
-        batch_op.alter_column('server_uri', type_=sa.String(255))
+    op.alter_column('ps_registrations', 'client_uri', type_=sa.String(255))
+    op.alter_column('ps_registrations', 'server_uri', type_=sa.String(255))
 def downgrade():
-    with op.batch_alter_table('ps_globals') as batch_op:
-        batch_op.alter_column('user_agent', type_=sa.String(40))
+    op.alter_column('ps_globals', 'user_agent', type_=sa.String(40))
-    with op.batch_alter_table('ps_contacts') as batch_op:
-        batch_op.alter_column('id', type_=sa.String(40))
-        batch_op.alter_column('uri', type_=sa.String(40))
-        batch_op.alter_column('user_agent', type_=sa.String(40))
+    op.alter_column('ps_contacts', 'id', type_=sa.String(40))
+    op.alter_column('ps_contacts', 'uri', type_=sa.String(40))
+    op.alter_column('ps_contacts', 'user_agent', type_=sa.String(40))
-    with op.batch_alter_table('ps_registrations') as batch_op:
-        batch_op.alter_column('client_uri', type_=sa.String(40))
-        batch_op.alter_column('server_uri', type_=sa.String(40))
+    op.alter_column('ps_registrations', 'client_uri', type_=sa.String(40))
+    op.alter_column('ps_registrations', 'server_uri', type_=sa.String(40))
