Replies: 2 comments 1 reply
-
Regarding using the incremental parser to retrieve the items one by one:

```cpp
#include <jsoncons/json.hpp>
#include <iostream>
#include <string>
#include <vector>

// Filter that forwards the events for each top-level array item to a
// json_decoder and signals when a complete item has been decoded.
class item_decoder : public jsoncons::json_filter
{
    jsoncons::json_decoder<jsoncons::json> decoder_;
public:
    item_decoder()
        : jsoncons::json_filter(decoder_)
    {
    }

    bool done() const
    {
        return done_;
    }

    void reset()
    {
        done_ = false;
        decoder_.reset();
    }

    jsoncons::json get_result()
    {
        return decoder_.get_result();
    }

private:
    int level_{ 0 };
    bool done_{ false };

    bool visit_begin_array(jsoncons::semantic_tag tag,
        const jsoncons::ser_context& context, std::error_code& ec) override
    {
        if (level_ > 0)  // don't forward the outermost array
        {
            this->destination().begin_array(tag, context, ec);
        }
        ++level_;
        return true;
    }

    bool visit_end_array(const jsoncons::ser_context& context,
        std::error_code& ec) override
    {
        --level_;
        if (level_ > 0)
        {
            destination().end_array(context, ec);
        }
        if (level_ == 1)  // a top-level item (an array) has completed
        {
            done_ = true;
        }
        return true;
    }

    bool visit_begin_object(jsoncons::semantic_tag tag,
        const jsoncons::ser_context& context, std::error_code& ec) override
    {
        if (level_ > 0)
        {
            this->destination().begin_object(tag, context, ec);
        }
        ++level_;
        return true;
    }

    bool visit_end_object(const jsoncons::ser_context& context,
        std::error_code& ec) override
    {
        --level_;
        if (level_ > 0)
        {
            destination().end_object(context, ec);
        }
        if (level_ == 1)  // a top-level item (an object) has completed
        {
            done_ = true;
        }
        return true;
    }
};

int main()
{
    std::vector<std::string> chunks = { "[",
        R"({"foo":"b)",
        R"(ar"},)",
        R"({"baz":"q)",
        R"(ux"}])"};

    item_decoder filter{};
    jsoncons::json_parser parser;
    try
    {
        for (const auto& chunk : chunks)
        {
            parser.update(chunk);
            parser.parse_some(filter);
            if (filter.done())
            {
                jsoncons::json j = filter.get_result();
                std::cout << j << "\n";
                filter.reset();
            }
        }
    }
    catch (const jsoncons::ser_error& e)
    {
        std::cout << e.what() << '\n';
    }
}
```

Output:
1 reply
-
Yes, my quick and dirty "item decoder" had some deficiencies. Here's a more robust implementation.

```cpp
#include <jsoncons/json.hpp>
#include <iostream>
#include <string>
#include <vector>

// Filter that decodes each element of the outermost array into a Json value
// and collects the completed values until they are retrieved.
template <typename Json>
class item_decoder : public jsoncons::json_filter
{
    jsoncons::json_decoder<Json> decoder_;
    std::vector<Json> results_;
public:
    item_decoder()
        : jsoncons::json_filter(decoder_)
    {
    }

    bool has_results() const
    {
        return !results_.empty();
    }

    std::vector<Json> get_results()
    {
        std::vector<Json> temp;
        temp.swap(results_);
        return temp;
    }

private:
    int level_{ 0 };

    bool visit_begin_array(jsoncons::semantic_tag tag,
        const jsoncons::ser_context& context, std::error_code& ec) override
    {
        if (level_ > 0)  // don't forward the outermost array
        {
            this->destination().begin_array(tag, context, ec);
        }
        ++level_;
        return true;
    }

    bool visit_end_array(const jsoncons::ser_context& context,
        std::error_code& ec) override
    {
        --level_;
        if (level_ > 0)
        {
            destination().end_array(context, ec);
        }
        return true;
    }

    bool visit_begin_object(jsoncons::semantic_tag tag,
        const jsoncons::ser_context& context, std::error_code& ec) override
    {
        ++level_;
        if (level_ == 2)  // starting a new top-level item
        {
            decoder_.reset();
        }
        this->destination().begin_object(tag, context, ec);
        return true;
    }

    bool visit_end_object(const jsoncons::ser_context& context,
        std::error_code& ec) override
    {
        destination().end_object(context, ec);
        if (level_ == 2)  // a top-level item has completed
        {
            results_.push_back(decoder_.get_result());
        }
        --level_;
        return true;
    }
};

void test_item_decoder(const std::vector<std::string>& chunks)
{
    item_decoder<jsoncons::json> filter{};
    jsoncons::json_parser parser;
    try
    {
        for (const auto& chunk : chunks)
        {
            parser.update(chunk);
            parser.parse_some(filter);
            std::cout << "parse a little\n";
            if (filter.has_results())
            {
                auto v = filter.get_results();
                for (const auto& j : v)
                {
                    std::cout << j << "\n";
                }
            }
        }
    }
    catch (const jsoncons::ser_error& e)
    {
        std::cout << e.what() << '\n';
    }
}

int main()
{
    std::vector<std::string> chunks1 = {
        "[",
        R"({"lost":"object"}, {"foo":"bar"},)",
        R"({"baz":"q)",
        R"(ux"}])"
    };
    std::vector<std::string> chunks2 = {
        "[",
        R"({"lost":"object"}, {"foo")",
        R"(:"bar"},)",
        R"({"baz":"q)",
        R"(ux"}])"
    };
    test_item_decoder(chunks1);
    std::cout << "\n";
    test_item_decoder(chunks2);
}
```

Output:
The basic idea is that you intercept the parse events in a filter and then do with them what you like.
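As a minimal sketch of that idea (illustrative only; it reuses the `json_filter` constructor and the `visit_begin_object` override signature shown in the examples above), a filter can observe events, here just counting objects, while passing everything through to its destination unchanged:

```cpp
#include <jsoncons/json.hpp>
#include <iostream>

// Sketch: count objects while forwarding all events unchanged.
class object_counter : public jsoncons::json_filter
{
    int count_{ 0 };
public:
    explicit object_counter(jsoncons::json_visitor& destination)
        : jsoncons::json_filter(destination)
    {
    }

    int count() const
    {
        return count_;
    }

private:
    bool visit_begin_object(jsoncons::semantic_tag tag,
        const jsoncons::ser_context& context, std::error_code& ec) override
    {
        ++count_;                                             // observe the event ...
        this->destination().begin_object(tag, context, ec);   // ... then pass it through
        return true;
    }
};

int main()
{
    jsoncons::json_decoder<jsoncons::json> decoder;
    object_counter counter(decoder);

    jsoncons::json_parser parser;
    parser.update(R"([{"a":1},{"b":2}])");
    parser.parse_some(counter);
    parser.finish_parse(counter);

    std::cout << "objects seen: " << counter.count() << "\n";
    std::cout << decoder.get_result() << "\n";
}
```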
0 replies
-
Hello Daniel,
I have an array of JSON objects and would like to retrieve them one by one as they are fully parsed.
Let’s look at this example:
https://github.com/danielaparker/jsoncons/blob/master/doc/Examples.md#I3
This code almost achieves what I need, but there is an issue.
I want to append data to the stream as it arrives from the network, but I cannot find a way to notify the library to resume parsing from where it stopped.
I have seen the incremental parsing example in the documentation:
https://github.com/danielaparker/jsoncons/blob/master/doc/ref/corelib/basic_json_parser.md#incremental-parsing-until-100-since-110
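For reference, the pattern in that linked example looks roughly like this (a minimal sketch with made-up chunk data, assuming jsoncons's `json_parser::update`/`parse_some`/`finish_parse` together with a `json_decoder`):

```cpp
#include <jsoncons/json.hpp>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    // Chunks as they might arrive from the network (made-up data).
    std::vector<std::string> chunks = { R"([{"foo":)", R"("bar"},{"baz")", R"(:"qux"}])" };

    jsoncons::json_decoder<jsoncons::json> decoder;
    jsoncons::json_parser parser;

    for (const auto& chunk : chunks)
    {
        parser.update(chunk);        // append newly arrived data
        parser.parse_some(decoder);  // resume parsing from where it stopped
    }
    parser.finish_parse(decoder);    // flush any remaining parser state

    std::cout << decoder.get_result() << "\n";  // the whole array, only at the end
}
```

On its own this only yields the complete document once everything has arrived; the filter approach shown in the replies above is what makes it possible to pull out each item as soon as it completes.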
However, this approach does not allow me to retrieve objects individually.
Is there a way to combine these two methods?
Thank you for your time and contributions to the open-source community!
Best regards,
Alexey Komarov