Merge "alembic: Fix downgrade and tweak for sqlite" into 13

changes/56/2356/1
Authored by zuul 9 years ago; committed by Gerrit Code Review
commit 772036b525

@@ -58,7 +58,8 @@ def run_migrations_online():
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata
target_metadata=target_metadata,
render_as_batch=True
)
try:

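The env.py hunk above enables Alembic's batch mode (render_as_batch=True), which is what lets the per-table rewrites in the rest of this change run on SQLite. SQLite's ALTER TABLE cannot modify or drop columns in place, so batch mode recreates the table instead (create a new table, copy the rows, drop the old one, rename). A minimal sketch of a migration written against that mode, using a hypothetical example_table that is not part of this commit:

from alembic import op
import sqlalchemy as sa

def upgrade():
    # On SQLite this becomes a copy-and-rename table rebuild; on other
    # dialects it emits ordinary ALTER TABLE statements.
    with op.batch_alter_table('example_table') as batch_op:
        batch_op.alter_column('name', type_=sa.String(255))
        batch_op.drop_column('obsolete_flag')

def downgrade():
    with op.batch_alter_table('example_table') as batch_op:
        batch_op.add_column(sa.Column('obsolete_flag', sa.String(3)))
        batch_op.alter_column('name', type_=sa.String(80))
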
@@ -45,7 +45,10 @@ def upgrade():
context = op.get_context()
# Upgrading to this revision WILL clear your directmedia values.
if context.bind.dialect.name != 'postgresql':
if context.bind.dialect.name == 'sqlite':
with op.batch_alter_table('sippeers') as batch_op:
batch_op.alter_column('directmedia', type_=new_type)
elif context.bind.dialect.name != 'postgresql':
op.alter_column('sippeers', 'directmedia',
type_=new_type,
existing_type=old_type)
@@ -66,7 +69,10 @@ def downgrade():
op.execute(tcr.update().where(tcr.c.directmedia==u'outgoing')
.values(directmedia=None))
if context.bind.dialect.name != 'postgresql':
if context.bind.dialect.name == 'sqlite':
with op.batch_alter_table('sippeers') as batch_op:
batch_op.alter_column('directmedia', type_=old_type)
elif context.bind.dialect.name != 'postgresql':
op.alter_column('sippeers', 'directmedia',
type_=old_type,
existing_type=new_type)

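The sippeers directmedia hunks above set the dispatch pattern repeated for the other enum columns in this change: SQLite goes through batch mode, PostgreSQL is skipped here (its named ENUM type is presumably converted elsewhere in the same migration), and every other dialect alters the column directly. A hedged sketch of that pattern as a stand-alone helper; the function name is illustrative and not part of the commit:

def alter_enum_column(table, column, old_type, new_type):
    # Dispatch on the active dialect, mirroring the checks in the hunks above.
    dialect = op.get_context().bind.dialect.name
    if dialect == 'sqlite':
        with op.batch_alter_table(table) as batch_op:
            batch_op.alter_column(column, type_=new_type)
    elif dialect != 'postgresql':
        op.alter_column(table, column, type_=new_type, existing_type=old_type)
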
@@ -17,4 +17,5 @@ def upgrade():
op.add_column('ps_globals', sa.Column('regcontext', sa.String(80)))
def downgrade():
op.drop_column('ps_globals', 'regcontext')
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.drop_column('regcontext')

@@ -19,4 +19,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_globals', 'default_from_user')
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.drop_column('default_from_user')

@@ -33,9 +33,11 @@ import sqlalchemy as sa
def upgrade():
op.alter_column('sippeers', 'useragent', type_=sa.String(255))
with op.batch_alter_table('sippeers') as batch_op:
batch_op.alter_column('useragent', type_=sa.String(255))
def downgrade():
op.alter_column('sippeers', 'useragent', type_=sa.String(20))
with op.batch_alter_table('sippeers') as batch_op:
batch_op.alter_column('useragent', type_=sa.String(20))

@@ -19,4 +19,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_globals', 'keep_alive_interval')
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.drop_column('keep_alive_interval')

@@ -17,4 +17,5 @@ def upgrade():
op.add_column('ps_endpoints', sa.Column('accountcode', sa.String(20)))
def downgrade():
op.drop_column('ps_endpoints', 'accountcode')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('accountcode')

@@ -18,4 +18,5 @@ def upgrade():
op.add_column('ps_globals', sa.Column('debug', sa.String(40)))
def downgrade():
op.drop_column('ps_globals', 'debug')
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.drop_column('debug')

@@ -45,4 +45,5 @@ def upgrade():
op.add_column('ps_endpoints', sa.Column('rpid_immediate', yesno_values))
def downgrade():
op.drop_column('ps_endpoints', 'rpid_immediate')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('rpid_immediate')

@@ -28,4 +28,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_endpoints', 'bind_rtp_to_media_address')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('bind_rtp_to_media_address')

@@ -20,5 +20,6 @@ def upgrade():
def downgrade():
op.drop_column('ps_endpoints', 'rtp_timeout')
op.drop_column('ps_endpoints', 'rtp_timeout_hold')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('rtp_timeout')
batch_op.drop_column('rtp_timeout_hold')

@@ -27,4 +27,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_endpoints', 'g726_non_standard')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('g726_non_standard')

@@ -19,4 +19,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_registrations', 'fatal_retry_interval')
with op.batch_alter_table('ps_registrations') as batch_op:
batch_op.drop_column('fatal_retry_interval')

@@ -15,8 +15,10 @@ import sqlalchemy as sa
def upgrade():
op.alter_column('ps_aors', 'contact', type_=sa.String(255))
with op.batch_alter_table('ps_aors') as batch_op:
batch_op.alter_column('contact', type_=sa.String(255))
def downgrade():
op.alter_column('ps_aors', 'contact', type_=sa.String(40))
with op.batch_alter_table('ps_aors') as batch_op:
batch_op.alter_column('contact', type_=sa.String(40))

@@ -120,15 +120,15 @@ def upgrade():
op.create_index('ps_registrations_id', 'ps_registrations', ['id'])
########################## add columns ###########################
with op.batch_alter_table('ps_endpoints') as batch_op:
# new columns for endpoints
op.add_column('ps_endpoints', sa.Column('media_address', sa.String(40)))
op.add_column('ps_endpoints', sa.Column('redirect_method',
batch_op.add_column(sa.Column('media_address', sa.String(40)))
batch_op.add_column(sa.Column('redirect_method',
pjsip_redirect_method_values))
op.add_column('ps_endpoints', sa.Column('set_var', sa.Text()))
batch_op.add_column(sa.Column('set_var', sa.Text()))
# rename mwi_fromuser to mwi_from_user
op.alter_column('ps_endpoints', 'mwi_fromuser',
batch_op.alter_column('mwi_fromuser',
new_column_name='mwi_from_user',
existing_type=sa.String(40))
@@ -144,20 +144,23 @@ def upgrade():
def downgrade():
########################## drop columns ##########################
op.drop_column('ps_aors', 'support_path')
op.drop_column('ps_aors', 'outbound_proxy')
op.drop_column('ps_aors', 'maximum_expiration')
with op.batch_alter_table('ps_aors') as batch_op:
batch_op.drop_column('support_path')
batch_op.drop_column('outbound_proxy')
batch_op.drop_column('maximum_expiration')
op.drop_column('ps_contacts', 'path')
op.drop_column('ps_contacts', 'outbound_proxy')
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.drop_column('path')
batch_op.drop_column('outbound_proxy')
op.alter_column('ps_endpoints', 'mwi_from_user',
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.alter_column('mwi_from_user',
new_column_name='mwi_fromuser',
existing_type=sa.String(40))
op.drop_column('ps_endpoints', 'set_var')
op.drop_column('ps_endpoints', 'redirect_method')
op.drop_column('ps_endpoints', 'media_address')
batch_op.drop_column('set_var')
batch_op.drop_column('redirect_method')
batch_op.drop_column('media_address')
########################## drop tables ###########################

@@ -20,14 +20,14 @@ NEW_ENUM = ['rfc4733', 'inband', 'info', 'auto']
old_type = sa.Enum(*OLD_ENUM, name='pjsip_dtmf_mode_values')
new_type = sa.Enum(*NEW_ENUM, name='pjsip_dtmf_mode_values_v2')
tcr = sa.sql.table('ps_endpoints', sa.Column('dtmf_mode', new_type,
nullable=True))
def upgrade():
context = op.get_context()
# Upgrading to this revision WILL clear your directmedia values.
if context.bind.dialect.name != 'postgresql':
if context.bind.dialect.name == 'sqlite':
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.alter_column('dtmf_mode', type_=new_type)
elif context.bind.dialect.name != 'postgresql':
op.alter_column('ps_endpoints', 'dtmf_mode',
type_=new_type,
existing_type=old_type)
@@ -45,10 +45,10 @@ def upgrade():
def downgrade():
context = op.get_context()
op.execute(tcr.update().where(tcr.c.directmedia==u'outgoing')
.values(directmedia=None))
if context.bind.dialect.name != 'postgresql':
if context.bind.dialect.name == 'sqlite':
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.alter_column('dtmf_mode', type_=old_type)
elif context.bind.dialect.name != 'postgresql':
op.alter_column('ps_endpoints', 'dtmf_mode',
type_=old_type,
existing_type=new_type)

@@ -27,4 +27,5 @@ def upgrade():
op.add_column('ps_endpoints', sa.Column('user_eq_phone', yesno_values))
def downgrade():
op.drop_column('ps_endpoints', 'user_eq_phone')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('user_eq_phone')

@@ -20,5 +20,7 @@ def upgrade():
def downgrade():
op.drop_column('ps_contacts', 'user_agent')
op.drop_column('ps_endpoints', 'message_context')
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.drop_column('user_agent')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('message_context')

@@ -20,8 +20,7 @@ YESNO_VALUES = ['yes', 'no']
def upgrade():
yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
op.add_column('ps_transports', sa.Column('allow_reload', yesno_values))
pass
def downgrade():
op.drop_column('ps_transports', 'allow_reload')
pass
with op.batch_alter_table('ps_transports') as batch_op:
batch_op.drop_column('allow_reload')

@@ -13,14 +13,14 @@ down_revision = '136885b81223'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.alter_column('ps_aors', 'qualify_timeout', type_=sa.Float)
op.alter_column('ps_contacts', 'qualify_timeout', type_=sa.Float)
pass
with op.batch_alter_table('ps_aors') as batch_op:
batch_op.alter_column('qualify_timeout', type_=sa.Float)
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.alter_column('qualify_timeout', type_=sa.Float)
def downgrade():
op.alter_column('ps_aors', 'qualify_timeout', type_=sa.Integer)
op.alter_column('ps_contacts', 'qualify_timeout', type_=sa.Integer)
pass
with op.batch_alter_table('ps_aors') as batch_op:
batch_op.alter_column('qualify_timeout', type_=sa.Integer)
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.alter_column('qualify_timeout', type_=sa.Integer)

@@ -12,6 +12,7 @@ down_revision = '4da0c5f79a9c'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ENUM
YESNO_VALUES = ['yes', 'no']
@@ -181,9 +182,21 @@ def upgrade():
def downgrade():
context = op.get_context()
op.drop_table('ps_endpoints')
op.drop_table('ps_auths')
op.drop_table('ps_aors')
op.drop_table('ps_contacts')
op.drop_table('ps_domain_aliases')
op.drop_table('ps_endpoint_id_ips')
enums = ['yesno_values',
'pjsip_100rel_values','pjsip_auth_type_values','pjsip_cid_privacy_values',
'pjsip_connected_line_method_values','pjsip_direct_media_glare_mitigation_values',
'pjsip_dtls_setup_values','pjsip_dtmf_mode_values','pjsip_identify_by_values',
'pjsip_media_encryption_values','pjsip_t38udptl_ec_values','pjsip_timer_values']
if context.bind.dialect.name == 'postgresql':
for e in enums:
ENUM(name=e).drop(op.get_bind(), checkfirst=False)

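The downgrade hunk above also drops the named ENUM types on PostgreSQL, where an enum is a standalone schema object that outlives the tables using it; other backends store these values as plain string columns, so there is nothing extra to clean up. A small sketch of the same idea, with an illustrative helper name that does not appear in the commit:

from alembic import op
from sqlalchemy.dialects.postgresql import ENUM

def drop_postgres_enums(enum_names):
    # Only PostgreSQL keeps ENUMs as separate types that must be dropped
    # explicitly after the tables referencing them are gone.
    if op.get_context().bind.dialect.name == 'postgresql':
        for name in enum_names:
            ENUM(name=name).drop(op.get_bind(), checkfirst=False)
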
@@ -18,4 +18,5 @@ def upgrade():
op.add_column('ps_globals', sa.Column('endpoint_identifier_order', sa.String(40)))
def downgrade():
op.drop_column('ps_globals', 'endpoint_identifier_order')
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.drop_column('endpoint_identifier_order')

@@ -16,10 +16,9 @@ import sqlalchemy as sa
def upgrade():
op.add_column('ps_aors', sa.Column('qualify_timeout', sa.Integer))
op.add_column('ps_contacts', sa.Column('qualify_timeout', sa.Integer))
pass
def downgrade():
op.drop_column('ps_aors', 'qualify_timeout')
op.drop_column('ps_contacts', 'qualify_timeout')
pass
with op.batch_alter_table('ps_aors') as batch_op:
batch_op.drop_column('qualify_timeout')
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.drop_column('qualify_timeout')

@@ -19,4 +19,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_endpoints', 'rtp_keepalive')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('rtp_keepalive')

@@ -19,43 +19,43 @@ YESNO_NAME = 'yesno_values'
YESNO_VALUES = ['yes', 'no']
def upgrade():
op.alter_column('ps_endpoints', 'tos_audio',
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.alter_column('tos_audio',
type_=sa.String(10))
op.alter_column('ps_endpoints', 'tos_video',
batch_op.alter_column('tos_video',
type_=sa.String(10))
op.alter_column('ps_transports', 'tos',
batch_op.drop_column('cos_audio')
batch_op.drop_column('cos_video')
batch_op.add_column(sa.Column('cos_audio', sa.Integer))
batch_op.add_column(sa.Column('cos_video', sa.Integer))
with op.batch_alter_table('ps_transports') as batch_op:
batch_op.alter_column('tos',
type_=sa.String(10))
# Can't cast YESNO_VALUES to Integers, so dropping and adding is required
op.drop_column('ps_endpoints', 'cos_audio')
op.drop_column('ps_endpoints', 'cos_video')
op.drop_column('ps_transports', 'cos')
op.add_column('ps_endpoints', sa.Column('cos_audio', sa.Integer))
op.add_column('ps_endpoints', sa.Column('cos_video', sa.Integer))
op.add_column('ps_transports', sa.Column('cos', sa.Integer))
pass
batch_op.drop_column('cos')
batch_op.add_column(sa.Column('cos', sa.Integer))
def downgrade():
yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
# Can't cast string to YESNO_VALUES, so dropping and adding is required
op.drop_column('ps_endpoints', 'tos_audio')
op.drop_column('ps_endpoints', 'tos_video')
op.drop_column('ps_transports', 'tos')
op.add_column('ps_endpoints', sa.Column('tos_audio', yesno_values))
op.add_column('ps_endpoints', sa.Column('tos_video', yesno_values))
op.add_column('ps_transports', sa.Column('tos', yesno_values))
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('tos_audio')
batch_op.drop_column('tos_video')
batch_op.add_column(sa.Column('tos_audio', yesno_values))
batch_op.add_column(sa.Column('tos_video', yesno_values))
batch_op.drop_column('cos_audio')
batch_op.drop_column('cos_video')
batch_op.add_column(sa.Column('cos_audio', yesno_values))
batch_op.add_column(sa.Column('cos_video', yesno_values))
with op.batch_alter_table('ps_transports') as batch_op:
batch_op.drop_column('tos')
batch_op.add_column(sa.Column('tos', yesno_values))
# Can't cast integers to YESNO_VALUES, so dropping and adding is required
op.drop_column('ps_endpoints', 'cos_audio')
op.drop_column('ps_endpoints', 'cos_video')
op.drop_column('ps_transports', 'cos')
op.add_column('ps_endpoints', sa.Column('cos_audio', yesno_values))
op.add_column('ps_endpoints', sa.Column('cos_video', yesno_values))
op.add_column('ps_transports', sa.Column('cos', yesno_values))
pass
batch_op.drop_column('cos')
batch_op.add_column(sa.Column('cos', yesno_values))

@@ -30,7 +30,7 @@ down_revision = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import ENUM
YESNO_VALUES = ['yes', 'no']
TYPE_VALUES = ['friend', 'user', 'peer']
@@ -323,8 +323,20 @@ def upgrade():
def downgrade():
context = op.get_context()
op.drop_table('sippeers')
op.drop_table('iaxfriends')
op.drop_table('voicemail')
op.drop_table('meetme')
op.drop_table('musiconhold')
enums = ['type_values', 'yes_no_values',
'sip_transport_values','sip_dtmfmode_values','sip_directmedia_values',
'sip_progressinband_values','sip_session_timers_values','sip_session_refresher_values',
'sip_callingpres_values','iax_requirecalltoken_values','iax_encryption_values',
'iax_transfer_values','moh_mode_values']
if context.bind.dialect.name == 'postgresql':
for e in enums:
ENUM(name=e).drop(op.get_bind(), checkfirst=False)

@@ -33,28 +33,31 @@ import sqlalchemy as sa
def upgrade():
context = op.get_context()
# Was unable to find a way to use op.alter_column() to add the unique
# index property.
op.drop_column('queue_members', 'uniqueid')
op.add_column(
'queue_members',
sa.Column(
name='uniqueid', type_=sa.Integer, nullable=False,
unique=True))
if context.bind.dialect.name == 'sqlite':
with op.batch_alter_table('queue_members') as batch_op:
batch_op.create_primary_key('queue_members_pj', columns='uniqueid')
else:
op.drop_column('queue_members', 'uniqueid')
op.add_column(
'queue_members',
sa.Column(
name='uniqueid', type_=sa.Integer, nullable=False,
unique=True))
# The postgres backend does not like the autoincrement needed for
# mysql here. It is just the backend that is giving a warning and
# not the database itself.
op.alter_column(
table_name='queue_members', column_name='uniqueid',
existing_type=sa.Integer, existing_nullable=False,
autoincrement=True)
op.alter_column(
table_name='queue_members', column_name='uniqueid',
existing_type=sa.Integer, existing_nullable=False,
autoincrement=True)
def downgrade():
# Was unable to find a way to use op.alter_column() to remove the
# unique index property.
op.drop_column('queue_members', 'uniqueid')
op.add_column(
'queue_members',
sa.Column(name='uniqueid', type_=sa.String(80), nullable=False))
with op.batch_alter_table('queue_members') as batch_op:
batch_op.drop_column('uniqueid')
batch_op.add_column(sa.Column(name='uniqueid', type_=sa.String(80), nullable=False))

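The queue_members hunk above treats SQLite specially because SQLite cannot add a PRIMARY KEY or AUTOINCREMENT property to an existing column; batch mode rebuilds the table with the constraint instead, while other dialects drop and re-add the column as shown. A sketch of just the SQLite branch (constraint and column names are copied from the hunk, passing the column list as Alembic expects):

from alembic import op

def upgrade():
    if op.get_context().bind.dialect.name == 'sqlite':
        # A batch-mode table rebuild is the only way SQLite can gain a
        # primary key on an existing column.
        with op.batch_alter_table('queue_members') as batch_op:
            batch_op.create_primary_key('queue_members_pj', ['uniqueid'])
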
@@ -28,5 +28,6 @@ def upgrade():
op.add_column('ps_endpoints', sa.Column('media_use_received_transport', yesno_values))
def downgrade():
op.drop_column('ps_endpoints', 'force_avp')
op.drop_column('ps_endpoints', 'media_use_received_transport')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('force_avp')
batch_op.drop_column('media_use_received_transport')

@@ -19,11 +19,13 @@ YESNO_VALUES = ['yes', 'no']
def upgrade():
yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
op.alter_column('ps_transports', 'verifiy_server', type_=yesno_values,
with op.batch_alter_table('ps_transports') as batch_op:
batch_op.alter_column('verifiy_server', type_=yesno_values,
new_column_name='verify_server')
def downgrade():
yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)
op.alter_column('ps_transports', 'verify_server', type_=yesno_values,
with op.batch_alter_table('ps_transports') as batch_op:
batch_op.alter_column('verify_server', type_=yesno_values,
new_column_name='verifiy_server')

@@ -17,4 +17,5 @@ def upgrade():
op.add_column('ps_globals', sa.Column('max_initial_qualify_time', sa.Integer))
def downgrade():
op.drop_column('ps_globals', 'max_initial_qualify_time')
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.drop_column('max_initial_qualify_time')

@@ -27,11 +27,10 @@ def upgrade():
op.add_column('ps_systems', sa.Column('disable_tcp_switch', yesno_values))
op.add_column('ps_registrations', sa.Column('line', yesno_values))
op.add_column('ps_registrations', sa.Column('endpoint', sa.String(40)))
pass
def downgrade():
op.drop_column('ps_systems', 'disable_tcp_switch')
op.drop_column('ps_registrations', 'line')
op.drop_column('ps_registrations', 'endpoint')
pass
with op.batch_alter_table('ps_systems') as batch_op:
batch_op.drop_column('disable_tcp_switch')
with op.batch_alter_table('ps_registrations') as batch_op:
batch_op.drop_column('line')
batch_op.drop_column('endpoint')

@@ -15,25 +15,28 @@ import sqlalchemy as sa
def upgrade():
op.alter_column('ps_globals', 'user_agent', type_=sa.String(255))
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.alter_column('user_agent', type_=sa.String(255))
op.alter_column('ps_contacts', 'id', type_=sa.String(255))
op.alter_column('ps_contacts', 'uri', type_=sa.String(255))
op.alter_column('ps_contacts', 'user_agent', type_=sa.String(255))
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.alter_column('id', type_=sa.String(255))
batch_op.alter_column('uri', type_=sa.String(255))
batch_op.alter_column('user_agent', type_=sa.String(255))
op.alter_column('ps_registrations', 'client_uri', type_=sa.String(255))
op.alter_column('ps_registrations', 'server_uri', type_=sa.String(255))
with op.batch_alter_table('ps_registrations') as batch_op:
batch_op.alter_column('client_uri', type_=sa.String(255))
batch_op.alter_column('server_uri', type_=sa.String(255))
def downgrade():
op.alter_column('ps_registrations', 'server_uri', type_=sa.String(40))
op.alter_column('ps_registrations', 'client_uri', type_=sa.String(40))
op.alter_column('ps_contacts', 'user_agent', type_=sa.String(40))
op.alter_column('ps_contacts', 'uri', type_=sa.String(40))
op.alter_column('ps_contacts', 'id', type_=sa.String(40))
op.alter_column('ps_globals', 'user_agent', type_=sa.String(40))
with op.batch_alter_table('ps_globals') as batch_op:
batch_op.alter_column('user_agent', type_=sa.String(40))
with op.batch_alter_table('ps_contacts') as batch_op:
batch_op.alter_column('id', type_=sa.String(40))
batch_op.alter_column('uri', type_=sa.String(40))
batch_op.alter_column('user_agent', type_=sa.String(40))
with op.batch_alter_table('ps_registrations') as batch_op:
batch_op.alter_column('client_uri', type_=sa.String(40))
batch_op.alter_column('server_uri', type_=sa.String(40))

@@ -28,4 +28,5 @@ def upgrade():
def downgrade():
op.drop_column('ps_endpoints', 'media_encryption_optimistic')
with op.batch_alter_table('ps_endpoints') as batch_op:
batch_op.drop_column('media_encryption_optimistic')
