More enum class usages.

GitOrigin-RevId: 6680ce2276ced5b015a1f508f0b7b5f28b5f4a9b
levlam 2020-01-19 03:02:56 +03:00
parent 8350443093
commit 728d8d79f6
17 changed files with 112 additions and 98 deletions
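
The conversion is mechanical but touches many call sites, so here is a minimal, self-contained C++ sketch of the pattern this commit applies everywhere (class and enumerator names are illustrative only, not TDLib code): an unscoped, often anonymous, enum member becomes a named enum class, the member is declared separately, and every enumerator use gains an explicit State:: qualifier.

// Minimal sketch of the pattern applied throughout this commit (illustrative
// names, not TDLib code): an unscoped enum member becomes a scoped enum, so
// enumerators no longer leak into the enclosing scope or convert to int.
#include <cstdio>

class Parser {
 public:
  void reset() {
    // Before: state_ = Start;
    state_ = State::Start;  // after: the enumerator must be qualified
  }
  bool is_done() const {
    return state_ == State::Done;
  }

 private:
  // Before: enum { Start, Running, Done } state_ = Start;
  enum class State { Start, Running, Done };  // after: a named scoped enum
  State state_ = State::Start;                // the member is declared separately
};

int main() {
  Parser p;
  p.reset();
  std::printf("done: %d\n", p.is_done() ? 1 : 0);
  return 0;
}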

View File

@@ -259,12 +259,12 @@ class CancellablePromise : public PromiseT {
 template <class ValueT, class FunctionOkT, class FunctionFailT>
 class LambdaPromise : public PromiseInterface<ValueT> {
-  enum OnFail { None, Ok, Fail };
+  enum class OnFail { None, Ok, Fail };
  public:
   void set_value(ValueT &&value) override {
     ok_(std::move(value));
-    on_fail_ = None;
+    on_fail_ = OnFail::None;
   }
   void set_error(Status &&error) override {
     do_error(std::move(error));
@@ -279,13 +279,15 @@ class LambdaPromise : public PromiseInterface<ValueT> {
   template <class FromOkT, class FromFailT>
   LambdaPromise(FromOkT &&ok, FromFailT &&fail, bool use_ok_as_fail)
-      : ok_(std::forward<FromOkT>(ok)), fail_(std::forward<FromFailT>(fail)), on_fail_(use_ok_as_fail ? Ok : Fail) {
+      : ok_(std::forward<FromOkT>(ok))
+      , fail_(std::forward<FromFailT>(fail))
+      , on_fail_(use_ok_as_fail ? OnFail::Ok : OnFail::Fail) {
   }
  private:
   FunctionOkT ok_;
   FunctionFailT fail_;
-  OnFail on_fail_ = None;
+  OnFail on_fail_ = OnFail::None;
   template <class FuncT, class ArgT = detail::get_arg_t<FuncT>>
   std::enable_if_t<std::is_assignable<ArgT, Status>::value> do_error_impl(FuncT &func, Status &&status) {
@@ -299,16 +301,16 @@ class LambdaPromise : public PromiseInterface<ValueT> {
   void do_error(Status &&error) {
     switch (on_fail_) {
-      case None:
+      case OnFail::None:
         break;
-      case Ok:
+      case OnFail::Ok:
         do_error_impl(ok_, std::move(error));
         break;
-      case Fail:
+      case OnFail::Fail:
         fail_(std::move(error));
         break;
     }
-    on_fail_ = None;
+    on_fail_ = OnFail::None;
   }
 };

View File

@@ -12,42 +12,49 @@
 namespace td {
 class DbKey {
- public:
-  enum Type { Empty, RawKey, Password };
+  enum class Type { Empty, RawKey, Password };
   Type type() const {
     return type_;
   }
+ public:
   bool is_empty() const {
-    return type_ == Empty;
+    return type_ == Type::Empty;
   }
   bool is_raw_key() const {
-    return type_ == RawKey;
+    return type_ == Type::RawKey;
   }
   bool is_password() const {
-    return type_ == Password;
+    return type_ == Type::Password;
   }
   CSlice data() const {
     return data_;
   }
   static DbKey raw_key(string raw_key) {
     DbKey res;
-    res.type_ = RawKey;
+    res.type_ = Type::RawKey;
     res.data_ = std::move(raw_key);
     return res;
   }
   static DbKey password(string password) {
     DbKey res;
-    res.type_ = Password;
+    res.type_ = Type::Password;
     res.data_ = std::move(password);
     return res;
   }
   static DbKey empty() {
     return DbKey();
   }
  private:
-  Type type_{Empty};
+  Type type_{Type::Empty};
   string data_;
 };

View File

@@ -169,11 +169,11 @@ SqliteStatement::Datatype SqliteStatement::view_datatype(int id) {
 void SqliteStatement::reset() {
   sqlite3_reset(stmt_.get());
-  state_ = Start;
+  state_ = State::Start;
 }
 Status SqliteStatement::step() {
-  if (state_ == Finish) {
+  if (state_ == State::Finish) {
     return Status::Error("One has to reset statement");
   }
   VLOG(sqlite) << "Start step " << tag("query", sqlite3_sql(stmt_.get())) << tag("statement", stmt_.get())
@@ -182,14 +182,14 @@ Status SqliteStatement::step() {
   VLOG(sqlite) << "Finish step " << tag("query", sqlite3_sql(stmt_.get())) << tag("statement", stmt_.get())
                << tag("database", db_.get());
   if (rc == SQLITE_ROW) {
-    state_ = GotRow;
+    state_ = State::GotRow;
     return Status::OK();
   }
+  state_ = State::Finish;
   if (rc == SQLITE_DONE) {
-    state_ = Finish;
     return Status::OK();
   }
-  state_ = Finish;
   return last_error();
 }

View File

@@ -44,10 +44,10 @@ class SqliteStatement {
   Result<string> explain();
   bool can_step() const {
-    return state_ != Finish;
+    return state_ != State::Finish;
   }
   bool has_row() const {
-    return state_ == GotRow;
+    return state_ == State::GotRow;
   }
   bool empty() const {
     return !stmt_;
@@ -72,7 +72,8 @@ class SqliteStatement {
     void operator()(sqlite3_stmt *stmt);
   };
-  enum { Start, GotRow, Finish } state_ = Start;
+  enum class State { Start, GotRow, Finish };
+  State state_ = State::Start;
   std::unique_ptr<sqlite3_stmt, StmtDeleter> stmt_;
   std::shared_ptr<detail::RawSqliteDb> db_;

View File

@@ -108,7 +108,7 @@ class BinlogReader {
     return offset_;
   }
   Result<size_t> read_next(BinlogEvent *event) {
-    if (state_ == ReadLength) {
+    if (state_ == State::ReadLength) {
       if (input_->size() < 4) {
         return 4;
       }
@@ -129,7 +129,7 @@ class BinlogReader {
                                << expected_size_ << ' ' << tag("is_encrypted", is_encrypted_)
                                << format::as_hex_dump<4>(Slice(input_->prepare_read().truncate(28))));
       }
-      state_ = ReadEvent;
+      state_ = State::ReadEvent;
     }
     if (input_->size() < size_) {
@@ -140,13 +140,14 @@ class BinlogReader {
     TRY_STATUS(event->init(input_->cut_head(size_).move_as_buffer_slice()));
     offset_ += size_;
     event->offset_ = offset_;
-    state_ = ReadLength;
+    state_ = State::ReadLength;
     return 0;
   }
  private:
   ChainBufferReader *input_;
-  enum { ReadLength, ReadEvent } state_ = ReadLength;
+  enum class State { ReadLength, ReadEvent };
+  State state_ = State::ReadLength;
   size_t size_{0};
   int64 offset_{0};
   int64 expected_size_{0};

View File

@@ -18,7 +18,7 @@ void HttpChunkedByteFlow::loop() {
   bool was_updated = false;
   size_t need_size;
   while (true) {
-    if (state_ == ReadChunkLength) {
+    if (state_ == State::ReadChunkLength) {
       bool ok = find_boundary(input_->clone(), "\r\n", len_);
       if (len_ > 10) {
         return finish(Status::Error(PSLICE() << "Too long length in chunked "
@@ -35,7 +35,7 @@ void HttpChunkedByteFlow::loop() {
         return finish(Status::Error(PSLICE() << "Invalid chunk size " << tag("size", len_)));
       }
       save_len_ = len_;
-      state_ = ReadChunkContent;
+      state_ = State::ReadChunkContent;
     }
     auto size = input_->size();
@@ -67,7 +67,7 @@ void HttpChunkedByteFlow::loop() {
     if (save_len_ == 0) {
       return finish(Status::OK());
     }
-    state_ = ReadChunkLength;
+    state_ = State::ReadChunkLength;
     len_ = 0;
   }
 }

View File

@@ -18,7 +18,8 @@ class HttpChunkedByteFlow final : public ByteFlowBase {
   static constexpr int MAX_CHUNK_SIZE = 15 << 20;  // some reasonable limit
   static constexpr int MAX_SIZE = 150 << 20;       // some reasonable limit
   static constexpr size_t MIN_UPDATE_SIZE = 1 << 14;
-  enum { ReadChunkLength, ReadChunkContent, OK } state_ = ReadChunkLength;
+  enum class State { ReadChunkLength, ReadChunkContent, OK };
+  State state_ = State::ReadChunkLength;
   size_t len_ = 0;
   size_t save_len_ = 0;
   size_t total_size_ = 0;

View File

@@ -50,7 +50,7 @@ static MutableSlice urldecode_inplace(MutableSlice str, bool decode_plus_sign_as
 void HttpReader::init(ChainBufferReader *input, size_t max_post_size, size_t max_files) {
   input_ = input;
-  state_ = ReadHeaders;
+  state_ = State::ReadHeaders;
   headers_read_length_ = 0;
   content_length_ = 0;
   query_ = nullptr;
@@ -67,7 +67,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
   }
   size_t need_size = input_->size() + 1;
   while (true) {
-    if (state_ != ReadHeaders) {
+    if (state_ != State::ReadHeaders) {
       flow_source_.wakeup();
       if (flow_sink_.is_ready() && flow_sink_.status().is_error()) {
         if (!temp_file_.empty()) {
@@ -81,7 +81,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
       }
     }
     switch (state_) {
-      case ReadHeaders: {
+      case State::ReadHeaders: {
         auto result = split_header();
         if (result.is_error() || result.ok() != 0) {
           return result;
@@ -107,7 +107,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
         if (content_encoding_.empty()) {
         } else if (content_encoding_ == "gzip" || content_encoding_ == "deflate") {
-          gzip_flow_ = GzipByteFlow(Gzip::Decode);
+          gzip_flow_ = GzipByteFlow(Gzip::Mode::Decode);
           gzip_flow_.set_max_output_size(MAX_FILE_SIZE);
           *source >> gzip_flow_;
           source = &gzip_flow_;
@@ -125,7 +125,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
         }
         if (content_type_lowercased_.find("multipart/form-data") != string::npos) {
-          state_ = ReadMultipartFormData;
+          state_ = State::ReadMultipartFormData;
           const char *p = std::strstr(content_type_lowercased_.c_str(), "boundary");
           if (p == nullptr) {
@@ -154,21 +154,21 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
           }
           boundary_ = "\r\n--" + boundary.str();
-          form_data_parse_state_ = SkipPrologue;
+          form_data_parse_state_ = FormDataParseState::SkipPrologue;
           form_data_read_length_ = 0;
           form_data_skipped_length_ = 0;
         } else if (content_type_lowercased_.find("application/x-www-form-urlencoded") != string::npos ||
                    content_type_lowercased_.find("application/json") != string::npos) {
-          state_ = ReadArgs;
+          state_ = State::ReadArgs;
         } else {
           form_data_skipped_length_ = 0;
-          state_ = ReadContent;
+          state_ = State::ReadContent;
         }
         continue;
       }
-      case ReadContent: {
+      case State::ReadContent: {
         if (content_->size() > max_post_size_) {
-          state_ = ReadContentToFile;
+          state_ = State::ReadContentToFile;
           continue;
         }
         if (flow_sink_.is_ready()) {
@@ -180,7 +180,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
         return need_size;
       }
-      case ReadContentToFile: {
+      case State::ReadContentToFile: {
        // save content to a file
        if (temp_file_.empty()) {
          auto file = open_temp_file("file");
@@ -201,7 +201,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
         return need_size;
       }
-      case ReadArgs: {
+      case State::ReadArgs: {
         auto size = content_->size();
         if (size > MAX_TOTAL_PARAMETERS_LENGTH - total_parameters_length_) {
           return Status::Error(413, "Request Entity Too Large: too much parameters");
@@ -227,7 +227,7 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
         return need_size;
       }
-      case ReadMultipartFormData: {
+      case State::ReadMultipartFormData: {
         TRY_RESULT(result, parse_multipart_form_data());
         if (result) {
           break;
@@ -249,16 +249,16 @@ Result<size_t> HttpReader::read_next(HttpQuery *query) {
 // returns false if need more data
 Result<bool> HttpReader::parse_multipart_form_data() {
   while (true) {
-    LOG(DEBUG) << "Parsing multipart form data in state " << form_data_parse_state_;
+    LOG(DEBUG) << "Parsing multipart form data in state " << static_cast<int32>(form_data_parse_state_);
     switch (form_data_parse_state_) {
-      case SkipPrologue:
+      case FormDataParseState::SkipPrologue:
         if (find_boundary(content_->clone(), {boundary_.c_str() + 2, boundary_.size() - 2}, form_data_read_length_)) {
           size_t to_skip = form_data_read_length_ + (boundary_.size() - 2);
           content_->advance(to_skip);
           form_data_skipped_length_ += to_skip;
           form_data_read_length_ = 0;
-          form_data_parse_state_ = ReadPartHeaders;
+          form_data_parse_state_ = FormDataParseState::ReadPartHeaders;
           continue;
         }
@@ -266,7 +266,7 @@ Result<bool> HttpReader::parse_multipart_form_data() {
         form_data_skipped_length_ += form_data_read_length_;
         form_data_read_length_ = 0;
         return false;
-      case ReadPartHeaders:
+      case FormDataParseState::ReadPartHeaders:
         if (find_boundary(content_->clone(), "\r\n\r\n", form_data_read_length_)) {
           total_headers_length_ += form_data_read_length_;
           if (total_headers_length_ > MAX_TOTAL_HEADERS_LENGTH) {
@@ -382,11 +382,11 @@ Result<bool> HttpReader::parse_multipart_form_data() {
             // don't need to save headers for files
             file_field_name_ = field_name_.str();
-            form_data_parse_state_ = ReadFile;
+            form_data_parse_state_ = FormDataParseState::ReadFile;
           } else {
             // save headers for query parameters. They contain header names
             query_->container_.push_back(std::move(headers));
-            form_data_parse_state_ = ReadPartValue;
+            form_data_parse_state_ = FormDataParseState::ReadPartValue;
           }
           continue;
@@ -396,7 +396,7 @@ Result<bool> HttpReader::parse_multipart_form_data() {
           return Status::Error(431, "Request Header Fields Too Large: total headers size exceeded");
         }
         return false;
-      case ReadPartValue:
+      case FormDataParseState::ReadPartValue:
         if (find_boundary(content_->clone(), boundary_, form_data_read_length_)) {
           if (total_parameters_length_ + form_data_read_length_ > MAX_TOTAL_PARAMETERS_LENGTH) {
             return Status::Error(413, "Request Entity Too Large: too much parameters in form data");
@@ -421,7 +421,7 @@ Result<bool> HttpReader::parse_multipart_form_data() {
             query_->args_.emplace_back(field_name_, value);
           }
-          form_data_parse_state_ = CheckForLastBoundary;
+          form_data_parse_state_ = FormDataParseState::CheckForLastBoundary;
           continue;
         }
         CHECK(content_->size() < form_data_read_length_ + boundary_.size());
@@ -430,7 +430,7 @@ Result<bool> HttpReader::parse_multipart_form_data() {
           return Status::Error(413, "Request Entity Too Large: too much parameters in form data");
         }
         return false;
-      case ReadFile: {
+      case FormDataParseState::ReadFile: {
         if (find_boundary(content_->clone(), boundary_, form_data_read_length_)) {
           auto file_part = content_->cut_head(form_data_read_length_).move_as_buffer_slice();
           content_->advance(boundary_.size());
@@ -442,7 +442,7 @@ Result<bool> HttpReader::parse_multipart_form_data() {
           query_->files_.emplace_back(file_field_name_, file_name_, field_content_type_, file_size_, temp_file_name_);
           close_temp_file();
-          form_data_parse_state_ = CheckForLastBoundary;
+          form_data_parse_state_ = FormDataParseState::CheckForLastBoundary;
           continue;
         }
@@ -455,7 +455,7 @@ Result<bool> HttpReader::parse_multipart_form_data() {
         TRY_STATUS(save_file_part(std::move(file_part)));
         return false;
       }
-      case CheckForLastBoundary: {
+      case FormDataParseState::CheckForLastBoundary: {
         if (content_->size() < 2) {
           // need more data
           return false;
@@ -467,13 +467,13 @@ Result<bool> HttpReader::parse_multipart_form_data() {
         if (x[0] == '-' && x[1] == '-') {
           content_->advance(2);
           form_data_skipped_length_ += 2;
-          form_data_parse_state_ = SkipEpilogue;
+          form_data_parse_state_ = FormDataParseState::SkipEpilogue;
         } else {
-          form_data_parse_state_ = ReadPartHeaders;
+          form_data_parse_state_ = FormDataParseState::ReadPartHeaders;
         }
         continue;
       }
-      case SkipEpilogue: {
+      case FormDataParseState::SkipEpilogue: {
         size_t size = content_->size();
         LOG(DEBUG) << "Skipping epilogue. Have " << size << " bytes";
         content_->advance(size);
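
One change in this file is not purely mechanical: in the hunk at -249,16 above, form_data_parse_state_ can no longer be streamed straight into LOG(DEBUG), because a scoped enum has no implicit conversion to its underlying integer type. A standalone sketch of why the static_cast appears (simplified, assumed names; not the TDLib logging macro):

// Scoped enums do not convert implicitly to integers, so streaming one into
// an ostream-style logger needs an explicit cast (or a dedicated operator<<).
#include <cstdint>
#include <iostream>

enum class FormDataParseState : std::int32_t { SkipPrologue, ReadPartHeaders, ReadPartValue };

int main() {
  FormDataParseState state = FormDataParseState::ReadPartValue;
  // std::cout << state;  // does not compile: no implicit conversion to int
  std::cout << static_cast<std::int32_t>(state) << '\n';  // prints 2
  return 0;
}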

View File

@@ -46,7 +46,8 @@ class HttpReader {
   size_t max_post_size_ = 0;
   size_t max_files_ = 0;
-  enum { ReadHeaders, ReadContent, ReadContentToFile, ReadArgs, ReadMultipartFormData } state_;
+  enum class State { ReadHeaders, ReadContent, ReadContentToFile, ReadArgs, ReadMultipartFormData };
+  State state_ = State::ReadHeaders;
   size_t headers_read_length_ = 0;
   size_t content_length_ = 0;
   ChainBufferReader *input_ = nullptr;
@@ -68,14 +69,15 @@ class HttpReader {
   string boundary_;
   size_t form_data_read_length_ = 0;
   size_t form_data_skipped_length_ = 0;
-  enum {
+  enum class FormDataParseState : int32 {
     SkipPrologue,
     ReadPartHeaders,
     ReadPartValue,
     ReadFile,
     CheckForLastBoundary,
     SkipEpilogue
-  } form_data_parse_state_;
+  };
+  FormDataParseState form_data_parse_state_ = FormDataParseState::SkipPrologue;
   MutableSlice field_name_;
   string file_field_name_;
   string field_content_type_;

View File

@@ -33,9 +33,9 @@ class Gzip::Impl {
 };
 Status Gzip::init_encode() {
-  CHECK(mode_ == Empty);
+  CHECK(mode_ == Mode::Empty);
   init_common();
-  mode_ = Encode;
+  mode_ = Mode::Encode;
   int ret = deflateInit2(&impl_->stream_, 6, Z_DEFLATED, 15, MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY);
   if (ret != Z_OK) {
     return Status::Error(PSLICE() << "zlib deflate init failed: " << ret);
@@ -44,9 +44,9 @@ Status Gzip::init_encode() {
 }
 Status Gzip::init_decode() {
-  CHECK(mode_ == Empty);
+  CHECK(mode_ == Mode::Empty);
   init_common();
-  mode_ = Decode;
+  mode_ = Mode::Decode;
   int ret = inflateInit2(&impl_->stream_, MAX_WBITS + 32);
   if (ret != Z_OK) {
     return Status::Error(PSLICE() << "zlib inflate init failed: " << ret);
@@ -76,19 +76,19 @@ void Gzip::set_output(MutableSlice output) {
 Result<Gzip::State> Gzip::run() {
   while (true) {
     int ret;
-    if (mode_ == Decode) {
+    if (mode_ == Mode::Decode) {
       ret = inflate(&impl_->stream_, Z_NO_FLUSH);
     } else {
       ret = deflate(&impl_->stream_, close_input_flag_ ? Z_FINISH : Z_NO_FLUSH);
     }
     if (ret == Z_OK) {
-      return Running;
+      return State::Running;
     }
     if (ret == Z_STREAM_END) {
       // TODO(now): fail if input is not empty;
       clear();
-      return Done;
+      return State::Done;
     }
     clear();
     return Status::Error(PSLICE() << "zlib error " << ret);
@@ -119,12 +119,12 @@ void Gzip::init_common() {
 }
 void Gzip::clear() {
-  if (mode_ == Decode) {
+  if (mode_ == Mode::Decode) {
     inflateEnd(&impl_->stream_);
-  } else if (mode_ == Encode) {
+  } else if (mode_ == Mode::Encode) {
     deflateEnd(&impl_->stream_);
   }
-  mode_ = Empty;
+  mode_ = Mode::Empty;
 }
 Gzip::Gzip() : impl_(make_unique<Impl>()) {
@@ -168,7 +168,7 @@ BufferSlice gzdecode(Slice s) {
       return BufferSlice();
     }
     auto state = r_state.ok();
-    if (state == Gzip::Done) {
+    if (state == Gzip::State::Done) {
       message.confirm_append(gzip.flush_output());
       break;
     }
@@ -197,7 +197,7 @@ BufferSlice gzencode(Slice s, double k) {
     return BufferSlice();
   }
   auto state = r_state.ok();
-  if (state != Gzip::Done) {
+  if (state != Gzip::State::Done) {
     return BufferSlice();
   }
   message.confirm_append(gzip.flush_output());
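
Outside the class, a nested scoped enum picks up a second qualifier, which is why the call sites above change from Gzip::Done and Gzip::Decode to Gzip::State::Done and Gzip::Mode::Decode. A minimal stub showing only the naming, not the real td::Gzip API:

// Stub illustrating the doubly qualified names callers now use; the real
// td::Gzip wraps zlib and returns Result<State>, which is omitted here.
#include <iostream>

class Gzip {
 public:
  enum class Mode { Empty, Encode, Decode };
  enum class State { Running, Done };

  explicit Gzip(Mode mode) : mode_(mode) {}
  State run() { return State::Done; }  // inside the class, one qualifier suffices

 private:
  Mode mode_;
};

int main() {
  Gzip gzip(Gzip::Mode::Decode);          // was Gzip::Decode before this commit
  if (gzip.run() == Gzip::State::Done) {  // was Gzip::Done
    std::cout << "done\n";
  }
  return 0;
}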

View File

@@ -24,11 +24,11 @@ class Gzip {
   Gzip &operator=(Gzip &&other);
   ~Gzip();
-  enum Mode { Empty, Encode, Decode };
+  enum class Mode { Empty, Encode, Decode };
   Status init(Mode mode) TD_WARN_UNUSED_RESULT {
-    if (mode == Encode) {
+    if (mode == Mode::Encode) {
       return init_encode();
-    } else if (mode == Decode) {
+    } else if (mode == Mode::Decode) {
       return init_decode();
     }
     clear();
@@ -79,7 +79,7 @@ class Gzip {
     return res;
   }
-  enum State { Running, Done };
+  enum class State { Running, Done };
   Result<State> run() TD_WARN_UNUSED_RESULT;
  private:
@@ -89,7 +89,7 @@ class Gzip {
   size_t input_size_ = 0;
   size_t output_size_ = 0;
   bool close_input_flag_ = false;
-  Mode mode_ = Empty;
+  Mode mode_ = Mode::Empty;
   void init_common();
   void clear();

View File

@@ -52,7 +52,7 @@ void GzipByteFlow::loop() {
       return finish(r_state.move_as_error());
     }
     auto state = r_state.ok();
-    if (state == Gzip::Done) {
+    if (state == Gzip::State::Done) {
      on_output_updated();
      return consume_input();
    }

View File

@@ -44,9 +44,9 @@ Result<int> OptionsParser::run(int argc, char *argv[]) {
   char buff[1024];
   StringBuilder sb(MutableSlice{buff, sizeof(buff)});
   for (auto &opt : options_) {
-    CHECK(opt.type != Option::OptionalArg);
+    CHECK(opt.type != Option::Type::OptionalArg);
     sb << opt.short_key;
-    if (opt.type == Option::Arg) {
+    if (opt.type == Option::Type::Arg) {
       sb << ":";
     }
   }
@@ -63,7 +63,7 @@ Result<int> OptionsParser::run(int argc, char *argv[]) {
     option o;
     o.flag = nullptr;
     o.val = opt.short_key;
-    o.has_arg = opt.type == Option::Arg ? required_argument : no_argument;
+    o.has_arg = opt.type == Option::Type::Arg ? required_argument : no_argument;
     o.name = opt.long_key.c_str();
     long_options.push_back(o);
   }
@@ -84,7 +84,7 @@ Result<int> OptionsParser::run(int argc, char *argv[]) {
     for (auto &opt : options_) {
       if (opt.short_key == opt_i) {
         Slice arg;
-        if (opt.type == Option::Arg) {
+        if (opt.type == Option::Type::Arg) {
           arg = Slice(optarg);
         }
         auto status = opt.arg_callback(arg);
@@ -112,13 +112,13 @@ StringBuilder &operator<<(StringBuilder &sb, const OptionsParser &o) {
     if (!opt.long_key.empty()) {
       sb << "|--" << opt.long_key;
     }
-    if (opt.type == OptionsParser::Option::OptionalArg) {
+    if (opt.type == OptionsParser::Option::Type::OptionalArg) {
       sb << "[";
     }
-    if (opt.type != OptionsParser::Option::NoArg) {
+    if (opt.type != OptionsParser::Option::Type::NoArg) {
       sb << "<arg>";
     }
-    if (opt.type == OptionsParser::Option::OptionalArg) {
+    if (opt.type == OptionsParser::Option::Type::OptionalArg) {
       sb << "]";
     }
     sb << "\t" << opt.description;

View File

@@ -18,7 +18,7 @@ namespace td {
 class OptionsParser {
   class Option {
    public:
-    enum Type { NoArg, Arg, OptionalArg };
+    enum class Type { NoArg, Arg, OptionalArg };
     Type type;
     char short_key;
     std::string long_key;

View File

@@ -38,7 +38,7 @@ class RegressionTesterImpl : public RegressionTester {
   }
   RegressionTesterImpl(string db_path, string db_cache_dir) : db_path_(db_path), db_cache_dir_(db_cache_dir) {
-    load_db(db_path);
+    load_db(db_path).ignore();
     if (db_cache_dir_.empty()) {
       db_cache_dir_ = PathView(db_path).without_extension().str() + ".cache/";
     }

View File

@@ -42,8 +42,8 @@ TEST(Gzip, flow) {
   td::ChainBufferWriter input_writer;
   auto input = input_writer.extract_reader();
   td::ByteFlowSource source(&input);
-  td::GzipByteFlow gzip_flow(td::Gzip::Encode);
-  gzip_flow = td::GzipByteFlow(td::Gzip::Encode);
+  td::GzipByteFlow gzip_flow(td::Gzip::Mode::Encode);
+  gzip_flow = td::GzipByteFlow(td::Gzip::Mode::Encode);
   td::ByteFlowSink sink;
   source >> gzip_flow >> sink;
@@ -70,7 +70,7 @@ TEST(Gzip, flow_error) {
   auto input_writer = td::ChainBufferWriter();
   auto input = input_writer.extract_reader();
   td::ByteFlowSource source(&input);
-  td::GzipByteFlow gzip_flow(td::Gzip::Decode);
+  td::GzipByteFlow gzip_flow(td::Gzip::Mode::Decode);
   td::ByteFlowSink sink;
   source >> gzip_flow >> sink;
@@ -92,10 +92,10 @@ TEST(Gzip, encode_decode_flow) {
   td::ChainBufferWriter input_writer;
   auto input = input_writer.extract_reader();
   td::ByteFlowSource source(&input);
-  td::GzipByteFlow gzip_encode_flow(td::Gzip::Encode);
-  td::GzipByteFlow gzip_decode_flow(td::Gzip::Decode);
-  td::GzipByteFlow gzip_encode_flow2(td::Gzip::Encode);
-  td::GzipByteFlow gzip_decode_flow2(td::Gzip::Decode);
+  td::GzipByteFlow gzip_encode_flow(td::Gzip::Mode::Encode);
+  td::GzipByteFlow gzip_decode_flow(td::Gzip::Mode::Decode);
+  td::GzipByteFlow gzip_encode_flow2(td::Gzip::Mode::Encode);
+  td::GzipByteFlow gzip_decode_flow2(td::Gzip::Mode::Decode);
   td::ByteFlowSink sink;
   source >> gzip_encode_flow >> gzip_decode_flow >> gzip_encode_flow2 >> gzip_decode_flow2 >> sink;

View File

@@ -356,11 +356,11 @@ TEST(Http, gzip_chunked_flow) {
   auto str = rand_string('a', 'z', 1000000);
   auto parts = rand_split(make_chunked(gzencode(str).as_slice().str()));
-  td::ChainBufferWriter input_writer;
+  ChainBufferWriter input_writer;
   auto input = input_writer.extract_reader();
   ByteFlowSource source(&input);
   HttpChunkedByteFlow chunked_flow;
-  GzipByteFlow gzip_flow(Gzip::Decode);
+  GzipByteFlow gzip_flow(Gzip::Mode::Decode);
   ByteFlowSink sink;
   source >> chunked_flow >> gzip_flow >> sink;