Skip to content

Commit 75e52b6

Browse files
Pekka Nousiainen and Piotr Obrzut
authored and committed
wl#7614 auto-incr1.diff
handle autoincrement temporary errors (cherry picked from commit 4ffa72047fcd2663a25b22d28cc66999dc7fec85)
1 parent 59dd63d commit 75e52b6

File tree

6 files changed

+159
-33
lines changed

6 files changed

+159
-33
lines changed

mysql-test/suite/ndb/r/ndb_import0.result

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,4 +30,21 @@ where x.a = y.a
3030
and (x.b = y.b or (x.b is null and y.b is null));
3131
count(*)
3232
4
33-
drop table t1, t2, t2ver;
33+
# simple hidden-pk test
34+
create table t3 (
35+
b int not null,
36+
# unique key would turn into pk
37+
key bx (b)
38+
) engine=ndb;
39+
create table t3ver like t3;
40+
select count(*) from t3;
41+
count(*)
42+
1000
43+
select count(*) from t3ver;
44+
count(*)
45+
1000
46+
select count(*) from t3 x, t3ver y
47+
where x.b = y.b;
48+
count(*)
49+
1000
50+
drop table t1, t2, t2ver, t3, t3ver;
mysql-test/suite/ndb/r/ndb_import2.result

Lines changed: 26 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,30 +1,38 @@
11
# large ndb_import tests
2-
# temporary error test
2+
# node failure test
33
create table t1 (
44
a int not null,
55
b int not null,
66
primary key using hash (a)
77
) engine ndb;
88
create table t1ver like t1;
9-
show create table t1;
10-
Table Create Table
11-
t1 CREATE TABLE `t1` (
12-
`a` int(11) NOT NULL,
13-
`b` int(11) NOT NULL,
14-
PRIMARY KEY (`a`) USING HASH
15-
) ENGINE=ndbcluster DEFAULT CHARSET=latin1
16-
show create table t1ver;
17-
Table Create Table
18-
t1ver CREATE TABLE `t1ver` (
19-
`a` int(11) NOT NULL,
20-
`b` int(11) NOT NULL,
21-
PRIMARY KEY (`a`) USING HASH
22-
) ENGINE=ndbcluster DEFAULT CHARSET=latin1
9+
create table t2 (
10+
b int not null,
11+
# unique key would turn into pk
12+
key (b)
13+
) engine ndb;
14+
create table t2ver like t2;
15+
# load t1ver, t2ver
16+
# import t1, t2
17+
# verify t1, t2
2318
select count(*) from t1;
2419
count(*)
25-
1000000
20+
400000
21+
select count(*) from t1ver;
22+
count(*)
23+
400000
2624
select count(*) from t1 x, t1ver y
2725
where x.a = y.a and x.b = y.b;
2826
count(*)
29-
1000000
30-
drop table t1, t1ver;
27+
400000
28+
select count(*) from t2;
29+
count(*)
30+
400000
31+
select count(*) from t2ver;
32+
count(*)
33+
400000
34+
select count(*) from t2 x, t2ver y
35+
where x.b = y.b;
36+
count(*)
37+
400000
38+
drop table t1, t1ver, t2, t2ver;

mysql-test/suite/ndb/t/ndb_import.pl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -292,6 +292,7 @@ sub run_import {
292292
push(@cmd, "--output-type=ndb");
293293
push(@cmd, "--output-workers=2");
294294
push(@cmd, "--db-workers=2");
295+
push(@cmd, "--temperrors=100");
295296
push(@cmd, "--fields-terminated-by='$fter'");
296297
if (defined($fenc)) {
297298
push(@cmd, "--fields-optionally-enclosed-by='$fenc'");

mysql-test/suite/ndb/t/ndb_import0.test

Lines changed: 42 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,4 +118,45 @@ select count(*) from t2 x, t2ver y
118118
where x.a = y.a
119119
and (x.b = y.b or (x.b is null and y.b is null));
120120

121-
drop table t1, t2, t2ver;
121+
--echo # simple hidden-pk test
122+
123+
perl;
124+
use strict;
125+
use Symbol;
126+
my $vardir = $ENV{MYSQLTEST_VARDIR}
127+
or die "need MYSQLTEST_VARDIR";
128+
my $file = "$vardir/tmp/t3.csv";
129+
my $fh = gensym();
130+
open($fh, ">$file")
131+
or die "$file: open for write failed: $!";
132+
for my $i (1..1000) {
133+
print $fh 10*$i, "\n";
134+
}
135+
close($fh)
136+
or die "$file: close after write failed: $!";
137+
exit(0);
138+
EOF
139+
140+
create table t3 (
141+
b int not null,
142+
# unique key would turn into pk
143+
key bx (b)
144+
) engine=ndb;
145+
146+
create table t3ver like t3;
147+
148+
exec $NDB_IMPORT --state-dir=$MYSQLTEST_VARDIR/tmp
149+
test $MYSQLTEST_VARDIR/tmp/t3.csv >> $NDB_TOOLS_OUTPUT 2>&1;
150+
151+
--disable_query_log
152+
eval load data infile '$MYSQLTEST_VARDIR/tmp/t3.csv'
153+
into table t3ver;
154+
--enable_query_log
155+
156+
select count(*) from t3;
157+
select count(*) from t3ver;
158+
159+
select count(*) from t3 x, t3ver y
160+
where x.b = y.b;
161+
162+
drop table t1, t2, t2ver, t3, t3ver;

mysql-test/suite/ndb/t/ndb_import2.test

Lines changed: 60 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
--echo # large ndb_import tests
77

8-
--echo # temporary error test
8+
--echo # node failure test
99

1010
perl;
1111
use strict;
@@ -16,14 +16,31 @@ my $file = "$vardir/tmp/t1.csv";
1616
my $fh = gensym();
1717
open($fh, ">$file")
1818
or die "$file: open for write failed: $!";
19-
for (my $i = 0; $i < 1000000; $i++) {
19+
for (my $i = 0; $i < 400000; $i++) {
2020
print $fh $i, "\t", int(rand(1000)), "\n";
2121
}
2222
close($fh)
2323
or "$file: close after write failed: $!";
2424
exit(0);
2525
EOF
2626

27+
perl;
28+
use strict;
29+
use Symbol;
30+
my $vardir = $ENV{MYSQLTEST_VARDIR}
31+
or die "need MYSQLTEST_VARDIR";
32+
my $file = "$vardir/tmp/t2.csv";
33+
my $fh = gensym();
34+
open($fh, ">$file")
35+
or die "$file: open for write failed: $!";
36+
for (my $i = 0; $i < 400000; $i++) {
37+
print $fh 10*$i, "\n";
38+
}
39+
close($fh)
40+
or "$file: close after write failed: $!";
41+
exit(0);
42+
EOF
43+
2744
create table t1 (
2845
a int not null,
2946
b int not null,
@@ -32,26 +49,56 @@ create table t1 (
3249

3350
create table t1ver like t1;
3451

35-
show create table t1;
36-
show create table t1ver;
37-
38-
# crash node 2 in 5-15 seconds
52+
create table t2 (
53+
b int not null,
54+
# unique key would turn into pk
55+
key (b)
56+
) engine ndb;
3957

40-
exec $NDB_MGM --no-defaults --ndb-connectstring="$NDB_CONNECTSTRING"
41-
-e "2 dump 9999 5000 15000" >> NDB_TOOLS_OUTPUT 2>&1;
58+
create table t2ver like t2;
4259

43-
exec $NDB_IMPORT --state-dir=$MYSQLTEST_VARDIR/tmp
44-
--temperrors=1000
45-
test $MYSQLTEST_VARDIR/tmp/t1.csv >> $NDB_TOOLS_OUTPUT 2>&1;
60+
--echo # load t1ver, t2ver
4661

47-
select count(*) from t1;
62+
# load first to avoid testing LOAD DATA node failure
4863

4964
--disable_query_log
5065
eval load data infile '$MYSQLTEST_VARDIR/tmp/t1.csv'
5166
into table t1ver;
5267
--enable_query_log
5368

69+
--disable_query_log
70+
eval load data infile '$MYSQLTEST_VARDIR/tmp/t2.csv'
71+
into table t2ver;
72+
--enable_query_log
73+
74+
# crash node 2 in 5-25 seconds, may hit t1 or t2
75+
76+
exec $NDB_MGM --no-defaults --ndb-connectstring="$NDB_CONNECTSTRING"
77+
-e "2 dump 9999 5000 25000" >> NDB_TOOLS_OUTPUT 2>&1;
78+
79+
--echo # import t1, t2
80+
81+
exec $NDB_IMPORT --state-dir=$MYSQLTEST_VARDIR/tmp
82+
--temperrors=1000 --verbose=1
83+
test
84+
$MYSQLTEST_VARDIR/tmp/t1.csv
85+
$MYSQLTEST_VARDIR/tmp/t2.csv >> $NDB_TOOLS_OUTPUT 2>&1;
86+
87+
--echo # verify t1, t2
88+
89+
# node failure may hit these too
90+
--disable_warnings
91+
92+
select count(*) from t1;
93+
select count(*) from t1ver;
5494
select count(*) from t1 x, t1ver y
5595
where x.a = y.a and x.b = y.b;
5696

57-
drop table t1, t1ver;
97+
select count(*) from t2;
98+
select count(*) from t2ver;
99+
select count(*) from t2 x, t2ver y
100+
where x.b = y.b;
101+
102+
drop table t1, t1ver, t2, t2ver;
103+
104+
--enable_warnings

storage/ndb/tools/NdbImportImpl.cpp

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2271,6 +2271,18 @@ NdbImportImpl::RelayOpWorker::do_run()
22712271
opt.m_ai_increment,
22722272
opt.m_ai_offset) == -1)
22732273
{
2274+
const NdbError& ndberror = m_ndb->getNdbError();
2275+
require(ndberror.code != 0);
2276+
if (ndberror.status == NdbError::TemporaryError)
2277+
{
2278+
log1("getAutoIncrementValue: " << ndberror);
2279+
rows_in.lock();
2280+
log1("push back to input: rowid " << row->m_rowid);
2281+
rows_in.push_back_force(row);
2282+
rows_in.unlock();
2283+
NdbSleep_MilliSleep(opt.m_tempdelay);
2284+
return;
2285+
}
22742286
m_util.set_error_ndb(m_error, __LINE__, m_ndb->getNdbError(),
22752287
"table %s: get autoincrement failed",
22762288
table.m_tab->getName());

0 commit comments

Comments (0)