From d6cfdb40f972e09ae845b3f002f000de8829b1fa Mon Sep 17 00:00:00 2001
From: kennytm
Date: Fri, 27 Mar 2020 17:13:51 +0800
Subject: [PATCH 1/2] restore: use cached InfoSchema rather than always fetching from etcd

---
 pkg/restore/client.go | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/pkg/restore/client.go b/pkg/restore/client.go
index 56bb83233..e90e2c458 100644
--- a/pkg/restore/client.go
+++ b/pkg/restore/client.go
@@ -9,7 +9,6 @@ import (
 	"encoding/hex"
 	"encoding/json"
 	"fmt"
-	"math"
 	"sort"
 	"strconv"
 	"sync"
@@ -292,10 +291,7 @@ func (rc *Client) GetTableSchema(
 	dbName model.CIStr,
 	tableName model.CIStr,
 ) (*model.TableInfo, error) {
-	info, err := dom.GetSnapshotInfoSchema(math.MaxInt64)
-	if err != nil {
-		return nil, errors.Trace(err)
-	}
+	info := dom.InfoSchema()
 	table, err := info.TableByName(dbName, tableName)
 	if err != nil {
 		return nil, errors.Trace(err)

From 04a1fb6f786506ffe76459afe00924a068b95041 Mon Sep 17 00:00:00 2001
From: kennytm
Date: Fri, 27 Mar 2020 18:28:25 +0800
Subject: [PATCH 2/2] tests: add a test case showing that restoring 300 small tables is fast

---
 tests/br_300_small_tables/run.sh | 47 ++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 tests/br_300_small_tables/run.sh

diff --git a/tests/br_300_small_tables/run.sh b/tests/br_300_small_tables/run.sh
new file mode 100644
index 000000000..d02c30d99
--- /dev/null
+++ b/tests/br_300_small_tables/run.sh
@@ -0,0 +1,47 @@
+#!/bin/sh
+#
+# Copyright 2020 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eu
+DB="$TEST_NAME"
+TABLES_COUNT=300
+
+run_sql "create schema $DB;"
+
+# generate 300 tables, each with one row of content.
+i=1
+while [ $i -le $TABLES_COUNT ]; do
+    run_sql "create table $DB.sbtest$i(id int primary key, k int not null, c char(120) not null, pad char(60) not null);"
+    run_sql "insert into $DB.sbtest$i values ($i, $i, '$i', '$i');"
+    i=$(($i+1))
+done
+
+# back up the database
+echo "backup start..."
+run_br backup db --db "$DB" -s "local://$TEST_DIR/$DB" --pd $PD_ADDR
+
+# truncate every table
+# (FIXME: we should drop instead of truncate, but restore would then re-execute the create-table DDLs and waste time)
+i=1
+while [ $i -le $TABLES_COUNT ]; do
+    run_sql "truncate $DB.sbtest$i;"
+    i=$(($i+1))
+done
+
+# restore the database
+# (FIXME: restore shouldn't need --no-schema to be fast; currently the alter-auto-id DDL slows things down)
+echo "restore start..."
+run_br restore db --db $DB -s "local://$TEST_DIR/$DB" --pd $PD_ADDR --no-schema
+
+run_sql "DROP DATABASE $DB;"
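
For context on PATCH 1/2: dom.GetSnapshotInfoSchema(math.MaxInt64) rebuilds a schema snapshot from the meta layer on every call, while dom.InfoSchema() returns the schema already cached in the domain, so each table lookup becomes an in-memory operation. Below is a minimal Go sketch of the resulting pattern (not part of the patch; the package name, resolveSchemas, and the surrounding loop are illustrative stand-ins for the real restore path in pkg/restore/client.go):

package restoreutil // illustrative package name

import (
	"github.com/pingcap/errors"
	"github.com/pingcap/parser/model"
	"github.com/pingcap/tidb/domain"
)

// resolveSchemas looks up the schema of every restored table. With the
// cached dom.InfoSchema(), each TableByName call is an in-memory lookup,
// so 300 tables no longer cost 300 snapshot fetches from the meta layer.
func resolveSchemas(dom *domain.Domain, db model.CIStr, names []model.CIStr) ([]*model.TableInfo, error) {
	info := dom.InfoSchema() // cached schema, fetched once
	tables := make([]*model.TableInfo, 0, len(names))
	for _, name := range names {
		table, err := info.TableByName(db, name)
		if err != nil {
			return nil, errors.Trace(err)
		}
		tables = append(tables, table.Meta())
	}
	return tables, nil
}

In the patched GetTableSchema this shows up as replacing the four-line snapshot fetch with the single info := dom.InfoSchema() line, which is what makes the 300-table restore in PATCH 2/2 fast.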
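
The snapshot variant remains appropriate when a point-in-time schema is actually needed; the patch only changes the latest-schema case. A hedged contrast sketch, assuming a caller that has a specific snapshot timestamp ts in hand (getTableSchemaAt is hypothetical; imports as in the previous sketch):

// getTableSchemaAt is an illustrative variant for point-in-time reads:
// GetSnapshotInfoSchema rebuilds the schema as of ts from meta storage,
// the per-call cost the patch avoids when only the latest schema is needed.
func getTableSchemaAt(dom *domain.Domain, ts uint64, dbName, tableName model.CIStr) (*model.TableInfo, error) {
	info, err := dom.GetSnapshotInfoSchema(ts)
	if err != nil {
		return nil, errors.Trace(err)
	}
	table, err := info.TableByName(dbName, tableName)
	if err != nil {
		return nil, errors.Trace(err)
	}
	return table.Meta(), nil
}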