@@ -2,13 +2,13 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file. See the AUTHORS file for names of contributors.

+#include "third_party/googletest/googletest/include/gtest/gtest.h"
 #include "db/log_reader.h"
 #include "db/log_writer.h"
 #include "leveldb/env.h"
 #include "util/coding.h"
 #include "util/crc32c.h"
 #include "util/random.h"
-#include "util/testharness.h"

 namespace leveldb {
 namespace log {
@@ -36,7 +36,7 @@ static std::string RandomSkewedString(int i, Random* rnd) {
   return BigString(NumberString(i), rnd->Skewed(17));
 }

-class LogTest {
+class LogTest : public testing::Test {
  public:
   LogTest()
       : reading_(false),
@@ -177,7 +177,7 @@ class LogTest {
     StringSource() : force_error_(false), returned_partial_(false) {}

     Status Read(size_t n, Slice* result, char* scratch) override {
-      ASSERT_TRUE(!returned_partial_) << "must not Read() after eof/error";
+      EXPECT_TRUE(!returned_partial_) << "must not Read() after eof/error";

       if (force_error_) {
         force_error_ = false;
@@ -258,9 +258,9 @@ uint64_t LogTest::initial_offset_last_record_offsets_[] = {
 int LogTest::num_initial_offset_records_ =
     sizeof(LogTest::initial_offset_last_record_offsets_) / sizeof(uint64_t);

-TEST(LogTest, Empty) { ASSERT_EQ("EOF", Read()); }
+TEST_F(LogTest, Empty) { ASSERT_EQ("EOF", Read()); }

-TEST(LogTest, ReadWrite) {
+TEST_F(LogTest, ReadWrite) {
   Write("foo");
   Write("bar");
   Write("");
@@ -273,7 +273,7 @@ TEST(LogTest, ReadWrite) {
   ASSERT_EQ("EOF", Read());  // Make sure reads at eof work
 }

-TEST(LogTest, ManyBlocks) {
+TEST_F(LogTest, ManyBlocks) {
   for (int i = 0; i < 100000; i++) {
     Write(NumberString(i));
   }
@@ -283,7 +283,7 @@ TEST(LogTest, ManyBlocks) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, Fragmentation) {
+TEST_F(LogTest, Fragmentation) {
   Write("small");
   Write(BigString("medium", 50000));
   Write(BigString("large", 100000));
@@ -293,7 +293,7 @@ TEST(LogTest, Fragmentation) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, MarginalTrailer) {
+TEST_F(LogTest, MarginalTrailer) {
   // Make a trailer that is exactly the same length as an empty record.
   const int n = kBlockSize - 2 * kHeaderSize;
   Write(BigString("foo", n));
@@ -306,7 +306,7 @@ TEST(LogTest, MarginalTrailer) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, MarginalTrailer2) {
+TEST_F(LogTest, MarginalTrailer2) {
   // Make a trailer that is exactly the same length as an empty record.
   const int n = kBlockSize - 2 * kHeaderSize;
   Write(BigString("foo", n));
@@ -319,7 +319,7 @@ TEST(LogTest, MarginalTrailer2) {
   ASSERT_EQ("", ReportMessage());
 }

-TEST(LogTest, ShortTrailer) {
+TEST_F(LogTest, ShortTrailer) {
   const int n = kBlockSize - 2 * kHeaderSize + 4;
   Write(BigString("foo", n));
   ASSERT_EQ(kBlockSize - kHeaderSize + 4, WrittenBytes());
@@ -331,7 +331,7 @@ TEST(LogTest, ShortTrailer) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, AlignedEof) {
+TEST_F(LogTest, AlignedEof) {
   const int n = kBlockSize - 2 * kHeaderSize + 4;
   Write(BigString("foo", n));
   ASSERT_EQ(kBlockSize - kHeaderSize + 4, WrittenBytes());
@@ -339,7 +339,7 @@ TEST(LogTest, AlignedEof) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, OpenForAppend) {
+TEST_F(LogTest, OpenForAppend) {
   Write("hello");
   ReopenForAppend();
   Write("world");
@@ -348,7 +348,7 @@ TEST(LogTest, OpenForAppend) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, RandomRead) {
+TEST_F(LogTest, RandomRead) {
   const int N = 500;
   Random write_rnd(301);
   for (int i = 0; i < N; i++) {
@@ -363,7 +363,7 @@ TEST(LogTest, RandomRead) {

 // Tests of all the error paths in log_reader.cc follow:

-TEST(LogTest, ReadError) {
+TEST_F(LogTest, ReadError) {
   Write("foo");
   ForceError();
   ASSERT_EQ("EOF", Read());
@@ -371,7 +371,7 @@ TEST(LogTest, ReadError) {
   ASSERT_EQ("OK", MatchError("read error"));
 }

-TEST(LogTest, BadRecordType) {
+TEST_F(LogTest, BadRecordType) {
   Write("foo");
   // Type is stored in header[6]
   IncrementByte(6, 100);
@@ -381,7 +381,7 @@ TEST(LogTest, BadRecordType) {
   ASSERT_EQ("OK", MatchError("unknown record type"));
 }

-TEST(LogTest, TruncatedTrailingRecordIsIgnored) {
+TEST_F(LogTest, TruncatedTrailingRecordIsIgnored) {
   Write("foo");
   ShrinkSize(4);  // Drop all payload as well as a header byte
   ASSERT_EQ("EOF", Read());
@@ -390,7 +390,7 @@ TEST(LogTest, TruncatedTrailingRecordIsIgnored) {
   ASSERT_EQ("", ReportMessage());
 }

-TEST(LogTest, BadLength) {
+TEST_F(LogTest, BadLength) {
   const int kPayloadSize = kBlockSize - kHeaderSize;
   Write(BigString("bar", kPayloadSize));
   Write("foo");
@@ -401,7 +401,7 @@ TEST(LogTest, BadLength) {
   ASSERT_EQ("OK", MatchError("bad record length"));
 }

-TEST(LogTest, BadLengthAtEndIsIgnored) {
+TEST_F(LogTest, BadLengthAtEndIsIgnored) {
   Write("foo");
   ShrinkSize(1);
   ASSERT_EQ("EOF", Read());
@@ -409,7 +409,7 @@ TEST(LogTest, BadLengthAtEndIsIgnored) {
   ASSERT_EQ("", ReportMessage());
 }

-TEST(LogTest, ChecksumMismatch) {
+TEST_F(LogTest, ChecksumMismatch) {
   Write("foo");
   IncrementByte(0, 10);
   ASSERT_EQ("EOF", Read());
@@ -417,7 +417,7 @@ TEST(LogTest, ChecksumMismatch) {
   ASSERT_EQ("OK", MatchError("checksum mismatch"));
 }

-TEST(LogTest, UnexpectedMiddleType) {
+TEST_F(LogTest, UnexpectedMiddleType) {
   Write("foo");
   SetByte(6, kMiddleType);
   FixChecksum(0, 3);
@@ -426,7 +426,7 @@ TEST(LogTest, UnexpectedMiddleType) {
   ASSERT_EQ("OK", MatchError("missing start"));
 }

-TEST(LogTest, UnexpectedLastType) {
+TEST_F(LogTest, UnexpectedLastType) {
   Write("foo");
   SetByte(6, kLastType);
   FixChecksum(0, 3);
@@ -435,7 +435,7 @@ TEST(LogTest, UnexpectedLastType) {
   ASSERT_EQ("OK", MatchError("missing start"));
 }

-TEST(LogTest, UnexpectedFullType) {
+TEST_F(LogTest, UnexpectedFullType) {
   Write("foo");
   Write("bar");
   SetByte(6, kFirstType);
@@ -446,7 +446,7 @@ TEST(LogTest, UnexpectedFullType) {
   ASSERT_EQ("OK", MatchError("partial record without end"));
 }

-TEST(LogTest, UnexpectedFirstType) {
+TEST_F(LogTest, UnexpectedFirstType) {
   Write("foo");
   Write(BigString("bar", 100000));
   SetByte(6, kFirstType);
@@ -457,7 +457,7 @@ TEST(LogTest, UnexpectedFirstType) {
   ASSERT_EQ("OK", MatchError("partial record without end"));
 }

-TEST(LogTest, MissingLastIsIgnored) {
+TEST_F(LogTest, MissingLastIsIgnored) {
   Write(BigString("bar", kBlockSize));
   // Remove the LAST block, including header.
   ShrinkSize(14);
@@ -466,7 +466,7 @@ TEST(LogTest, MissingLastIsIgnored) {
   ASSERT_EQ(0, DroppedBytes());
 }

-TEST(LogTest, PartialLastIsIgnored) {
+TEST_F(LogTest, PartialLastIsIgnored) {
   Write(BigString("bar", kBlockSize));
   // Cause a bad record length in the LAST block.
   ShrinkSize(1);
@@ -475,7 +475,7 @@ TEST(LogTest, PartialLastIsIgnored) {
   ASSERT_EQ(0, DroppedBytes());
 }

-TEST(LogTest, SkipIntoMultiRecord) {
+TEST_F(LogTest, SkipIntoMultiRecord) {
   // Consider a fragmented record:
   //    first(R1), middle(R1), last(R1), first(R2)
   // If initial_offset points to a record after first(R1) but before first(R2)
@@ -491,7 +491,7 @@ TEST(LogTest, SkipIntoMultiRecord) {
   ASSERT_EQ("EOF", Read());
 }

-TEST(LogTest, ErrorJoinsRecords) {
+TEST_F(LogTest, ErrorJoinsRecords) {
   // Consider two fragmented records:
   //    first(R1) last(R1) first(R2) last(R2)
   // where the middle two fragments disappear. We do not want
@@ -514,47 +514,50 @@ TEST(LogTest, ErrorJoinsRecords) {
   ASSERT_GE(dropped, 2 * kBlockSize);
 }

-TEST(LogTest, ReadStart) { CheckInitialOffsetRecord(0, 0); }
+TEST_F(LogTest, ReadStart) { CheckInitialOffsetRecord(0, 0); }

-TEST(LogTest, ReadSecondOneOff) { CheckInitialOffsetRecord(1, 1); }
+TEST_F(LogTest, ReadSecondOneOff) { CheckInitialOffsetRecord(1, 1); }

-TEST(LogTest, ReadSecondTenThousand) { CheckInitialOffsetRecord(10000, 1); }
+TEST_F(LogTest, ReadSecondTenThousand) { CheckInitialOffsetRecord(10000, 1); }

-TEST(LogTest, ReadSecondStart) { CheckInitialOffsetRecord(10007, 1); }
+TEST_F(LogTest, ReadSecondStart) { CheckInitialOffsetRecord(10007, 1); }

-TEST(LogTest, ReadThirdOneOff) { CheckInitialOffsetRecord(10008, 2); }
+TEST_F(LogTest, ReadThirdOneOff) { CheckInitialOffsetRecord(10008, 2); }

-TEST(LogTest, ReadThirdStart) { CheckInitialOffsetRecord(20014, 2); }
+TEST_F(LogTest, ReadThirdStart) { CheckInitialOffsetRecord(20014, 2); }

-TEST(LogTest, ReadFourthOneOff) { CheckInitialOffsetRecord(20015, 3); }
+TEST_F(LogTest, ReadFourthOneOff) { CheckInitialOffsetRecord(20015, 3); }

-TEST(LogTest, ReadFourthFirstBlockTrailer) {
+TEST_F(LogTest, ReadFourthFirstBlockTrailer) {
   CheckInitialOffsetRecord(log::kBlockSize - 4, 3);
 }

-TEST(LogTest, ReadFourthMiddleBlock) {
+TEST_F(LogTest, ReadFourthMiddleBlock) {
   CheckInitialOffsetRecord(log::kBlockSize + 1, 3);
 }

-TEST(LogTest, ReadFourthLastBlock) {
+TEST_F(LogTest, ReadFourthLastBlock) {
   CheckInitialOffsetRecord(2 * log::kBlockSize + 1, 3);
 }

-TEST(LogTest, ReadFourthStart) {
+TEST_F(LogTest, ReadFourthStart) {
   CheckInitialOffsetRecord(
       2 * (kHeaderSize + 1000) + (2 * log::kBlockSize - 1000) + 3 * kHeaderSize,
       3);
 }

-TEST(LogTest, ReadInitialOffsetIntoBlockPadding) {
+TEST_F(LogTest, ReadInitialOffsetIntoBlockPadding) {
   CheckInitialOffsetRecord(3 * log::kBlockSize - 3, 5);
 }

-TEST(LogTest, ReadEnd) { CheckOffsetPastEndReturnsNoRecords(0); }
+TEST_F(LogTest, ReadEnd) { CheckOffsetPastEndReturnsNoRecords(0); }

-TEST(LogTest, ReadPastEnd) { CheckOffsetPastEndReturnsNoRecords(5); }
+TEST_F(LogTest, ReadPastEnd) { CheckOffsetPastEndReturnsNoRecords(5); }

 }  // namespace log
 }  // namespace leveldb

-int main(int argc, char** argv) { return leveldb::test::RunAllTests(); }
+int main(int argc, char** argv) {
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}