Mirror of https://github.com/ChronosX88/psyced.git (synced 2024-11-08 19:41:00 +00:00)
fix in jsonparser: use float when int isn't big enough
commit 9d10b13569
parent 5a6e4d4ff2
@@ -527,7 +527,9 @@ PROTECTED mixed nextObject() {
     if ((b >= '0' && b <= '9') || b == '.' || b == '-' || b == '+') {
         int a; float b_; string c_;
         sscanf(s, "%d%s", a, c_);
-        if(c_ && sizeof(c_)) {
+        // some values of json ints exceed the limits of MAX_INT, in that case we need to use float -lynX
+        // maybe it is more efficient, if we used float in all cases then
+        if ((c_ && sizeof(c_)) || (a && s != to_string(a))) {
 #ifdef __PIKE__
             sscanf(s, "%f", b_);
 #else
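The changed condition falls back to float parsing whenever sscanf() leaves text behind after the integer, or when printing the parsed int back no longer reproduces the token, which is the symptom of a value that overflowed MAX_INT. As a rough stand-alone sketch of the same fallback idea, here is plain C rather than the LPC above; strtol()/strtod() with an ERANGE check stand in for the string round-trip, and every name in it is made up for illustration:

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

/* hypothetical helper, not part of psyced: returns 1 and fills *out_f when
 * the token needs a float (fraction, exponent, or a value that overflows
 * long); returns 0 and fills *out_i when it fits in a long. */
static int json_number_needs_float(const char *s, long *out_i, double *out_f)
{
    char *end;
    errno = 0;
    long v = strtol(s, &end, 10);
    if (errno != ERANGE && *end == '\0') {
        *out_i = v;               /* whole token consumed, no overflow */
        return 0;
    }
    *out_f = strtod(s, NULL);     /* leftover text (".25", "e5") or overflow */
    return 1;
}

int main(void)
{
    const char *samples[] = { "42", "-7", "3.25", "1e10", "92233720368547758079" };
    for (int k = 0; k < 5; k++) {
        long i; double f;
        if (json_number_needs_float(samples[k], &i, &f))
            printf("%-22s -> float %g\n", samples[k], f);
        else
            printf("%-22s -> int   %ld\n", samples[k], i);
    }
    return 0;
}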
@@ -5,7 +5,7 @@
 
 #include <net.h>
 
-persistent int lastid;
+persistent float lastid;
 
 volatile object feed;
 
@@ -28,15 +28,21 @@ parse(string body, mapping headers) {
     P4((body))
 //#endif
     unless (pointerp(wurst = parse_json(body))) {
         P1(("%O failed to parse its timeline.\n", ME))
+        monitor_report("_failure_network_fetch_twitter_empty",
+            "[_source] failed to parse its timeline");
         return;
     }
     unless (sizeof(wurst)) {
         P1(("%O received an empty structure.\n", ME))
+        monitor_report("_failure_network_fetch_twitter_empty",
+            "[_source] received an empty structure.");
         return;
     }
+    // this used to fail on MAX_INT turning the ints to negative.. interestingly
+    // it works out of the box now that i convert this to float. funny to run into
+    // such a weird problem only after months of usage, but if twitter never resets
+    // its packet ids, that's where you end up.. bignums!
     if (wurst[0]["id"] <= lastid) {
-        P1(("%O received %d old updates.\n", ME, sizeof(wurst)))
+        P1(("%O received %d old updates (id0 %O <= lastid %O).\n", ME, sizeof(wurst), wurst[0]["id"], lastid))
         return;
     }
     lastid = wurst[0]["id"];
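The hunks above move the lastid watermark from persistent int to persistent float because, as the in-code comment says, ids beyond the driver's MAX_INT wrap negative and the "id <= lastid" test then discards every fresh update. A minimal stand-alone illustration of that failure mode, in plain C with 32-bit ints and invented id values (none of this is psyced code):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int64_t incoming_id  = 2500000000LL;   /* made-up id beyond INT32_MAX */
    int32_t lastid_int   = 2147483000;     /* old watermark type          */
    double  lastid_float = 2147483000.0;   /* new watermark type          */

    /* narrowing to a 32-bit int is implementation-defined and typically
     * wraps to a negative value, so the "is this update old?" test misfires */
    int32_t narrowed = (int32_t)incoming_id;
    printf("as int32: %d -> %s\n", narrowed,
           narrowed <= lastid_int ? "wrongly dropped as old" : "kept as new");

    /* as a float (double) the comparison keeps working; doubles represent
     * integers exactly up to 2^53, far beyond these ids */
    printf("as float: %.0f -> %s\n", (double)incoming_id,
           (double)incoming_id <= lastid_float ? "dropped as old" : "kept as new");
    return 0;
}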