
Commit c84770b

Fix bgupdate error if index already exists (#2167)
When creating a new table index in the background, guard against the index already existing. Fixes #2135. Also, make sure we restore the autocommit flag when we're done; otherwise we get further failures from other operations later on. Fixes #1890 (hopefully).
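For illustration only, a minimal standalone sketch of the pattern this change adopts, assuming psycopg2; the connection string, table, and index names below are made up, not taken from Synapse:

# Sketch of the guard + autocommit-restore pattern (illustrative names, not Synapse code).
import psycopg2

conn = psycopg2.connect("dbname=synapse")
# CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
# so the session has to be switched to autocommit first.
conn.set_session(autocommit=True)
try:
    cur = conn.cursor()
    # An interrupted earlier attempt may have left an invalid, half-built
    # index behind; drop it before recreating it.
    cur.execute("DROP INDEX IF EXISTS my_table_room_id_idx")
    cur.execute("CREATE INDEX CONCURRENTLY my_table_room_id_idx ON my_table (room_id)")
finally:
    # Always restore the flag, otherwise later operations on this
    # connection run outside the expected transaction handling.
    conn.set_session(autocommit=False)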
1 parent: 380fb87

File tree: 1 file changed (+53 -30 lines)

synapse/storage/background_updates.py

@@ -228,46 +228,69 @@ def register_background_index_update(self, update_name, index_name,
             columns (list[str]): columns/expressions to include in index
         """
 
-        # if this is postgres, we add the indexes concurrently. Otherwise
-        # we fall back to doing it inline
-        if isinstance(self.database_engine, engines.PostgresEngine):
-            conc = True
-        else:
-            conc = False
-            # We don't use partial indices on SQLite as it wasn't introduced
-            # until 3.8, and wheezy has 3.7
-            where_clause = None
-
-        sql = (
-            "CREATE INDEX %(conc)s %(name)s ON %(table)s (%(columns)s)"
-            " %(where_clause)s"
-        ) % {
-            "conc": "CONCURRENTLY" if conc else "",
-            "name": index_name,
-            "table": table,
-            "columns": ", ".join(columns),
-            "where_clause": "WHERE " + where_clause if where_clause else ""
-        }
-
-        def create_index_concurrently(conn):
+        def create_index_psql(conn):
             conn.rollback()
             # postgres insists on autocommit for the index
             conn.set_session(autocommit=True)
-            c = conn.cursor()
-            c.execute(sql)
-            conn.set_session(autocommit=False)
 
-        def create_index(conn):
+            try:
+                c = conn.cursor()
+
+                # If a previous attempt to create the index was interrupted,
+                # we may already have a half-built index. Let's just drop it
+                # before trying to create it again.
+
+                sql = "DROP INDEX IF EXISTS %s" % (index_name,)
+                logger.debug("[SQL] %s", sql)
+                c.execute(sql)
+
+                sql = (
+                    "CREATE INDEX CONCURRENTLY %(name)s ON %(table)s"
+                    " (%(columns)s) %(where_clause)s"
+                ) % {
+                    "name": index_name,
+                    "table": table,
+                    "columns": ", ".join(columns),
+                    "where_clause": "WHERE " + where_clause if where_clause else ""
+                }
+                logger.debug("[SQL] %s", sql)
+                c.execute(sql)
+            finally:
+                conn.set_session(autocommit=False)
+
+        def create_index_sqlite(conn):
+            # Sqlite doesn't support concurrent creation of indexes.
+            #
+            # We don't use partial indices on SQLite as it wasn't introduced
+            # until 3.8, and wheezy has 3.7
+            #
+            # We assume that sqlite doesn't give us invalid indices; however
+            # we may still end up with the index existing but the
+            # background_updates not having been recorded if synapse got shut
+            # down at the wrong moment - hance we use IF NOT EXISTS. (SQLite
+            # has supported CREATE TABLE|INDEX IF NOT EXISTS since 3.3.0.)
+            sql = (
+                "CREATE INDEX IF NOT EXISTS %(name)s ON %(table)s"
+                " (%(columns)s)"
+            ) % {
+                "name": index_name,
+                "table": table,
+                "columns": ", ".join(columns),
+            }
+
             c = conn.cursor()
+            logger.debug("[SQL] %s", sql)
             c.execute(sql)
 
+        if isinstance(self.database_engine, engines.PostgresEngine):
+            runner = create_index_psql
+        else:
+            runner = create_index_sqlite
+
         @defer.inlineCallbacks
         def updater(progress, batch_size):
             logger.info("Adding index %s to %s", index_name, table)
-            if conc:
-                yield self.runWithConnection(create_index_concurrently)
-            else:
-                yield self.runWithConnection(create_index)
+            yield self.runWithConnection(runner)
             yield self._end_background_update(update_name)
             defer.returnValue(1)
 
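For context, a call site registering such an index update in a store class might look roughly like this; the update, index, table, and column names are illustrative placeholders, not part of this commit:

# Hypothetical registration inside a data store (illustrative names):
self.register_background_index_update(
    "my_table_room_id_index",        # update_name recorded in background_updates
    index_name="my_table_room_id_idx",
    table="my_table",
    columns=["room_id"],
)

The background update loop later invokes the registered updater, which now runs create_index_psql or create_index_sqlite depending on the database engine, and is safe to retry if an earlier run left the index behind.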
