static bool
handle_oauth_sasl_error(PGconn *conn, const char *msg, int msglen)
{
- JsonLexContext lex = {0};
+ JsonLexContext *lex;
JsonSemAction sem = {0};
JsonParseErrorType err;
	struct json_ctx ctx = {0};
	char	   *errmsg = NULL;

	[...]
		return false;
	}
- makeJsonLexContextCstringLen(&lex, msg, msglen, PG_UTF8, true);
- setJsonLexContextOwnsTokens(&lex, true); /* must not leak on error */
+ lex = makeJsonLexContextCstringLen(NULL, msg, msglen, PG_UTF8, true);
+ setJsonLexContextOwnsTokens(lex, true); /* must not leak on error */
initPQExpBuffer(&ctx.errbuf);
sem.semstate = &ctx;
sem.array_start = oauth_json_array_start;
sem.scalar = oauth_json_scalar;
- err = pg_parse_json(&lex, &sem);
+ err = pg_parse_json(lex, &sem);
	if (err == JSON_SEM_ACTION_FAILED)
	{
		[...]
	}
else if (err != JSON_SUCCESS)
- errmsg = json_errdetail(err, &lex);
+ errmsg = json_errdetail(err, lex);
if (errmsg)
		libpq_append_conn_error(conn,
								"failed to parse server's error response: %s",
								errmsg);
/* Don't need the error buffer or the JSON lexer anymore. */
termPQExpBuffer(&ctx.errbuf);
- freeJsonLexContext(&lex);
+ freeJsonLexContext(lex);
if (errmsg)
goto cleanup;
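The hunk above is apparently from libpq's OAuth error handling (fe-auth-oauth.c). For context, and not part of the patch: a minimal sketch of the heap-allocating jsonapi pattern the hunk switches to. The helper name parse_json_blob() is hypothetical, and the stock nullSemAction stands in for the patch's OAuth callbacks. Passing NULL as the first argument makes makeJsonLexContextCstringLen() allocate the context itself, and freeJsonLexContext() then releases the struct along with its internal state.

#include "postgres_fe.h"

#include "common/jsonapi.h"
#include "mb/pg_wchar.h"

/* Hypothetical helper: parse one complete JSON document. */
static bool
parse_json_blob(const char *buf, size_t len)
{
	JsonLexContext *lex;
	JsonParseErrorType err;

	/* NULL asks jsonapi to allocate the context on the heap */
	lex = makeJsonLexContextCstringLen(NULL, buf, len, PG_UTF8, true);
	if (lex == NULL)
		return false;			/* frontend allocation can fail */

	err = pg_parse_json(lex, &nullSemAction);
	if (err != JSON_SUCCESS)
		fprintf(stderr, "%s\n", json_errdetail(err, lex));

	/* jsonapi allocated the struct, so this frees it as well */
	freeJsonLexContext(lex);
	return (err == JSON_SUCCESS);
}

The next hunk appears to be from main() in the test_json_parser_incremental driver (src/test/modules/test_json_parser), where the lexer gets the same stack-to-heap treatment, except that here the struct stays owned by the caller: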
int
main(int argc, char **argv)
{
	char		buff[BUFSIZE];
FILE *json_file;
JsonParseErrorType result;
- JsonLexContext lex;
+ JsonLexContext *lex;
StringInfoData json;
int n_read;
	size_t		chunk_size = DEFAULT_CHUNK_SIZE;
	[...]

	pg_logging_init(argv[0]);
+ lex = calloc(1, sizeof(JsonLexContext));
+ if (!lex)
+ pg_fatal("out of memory");
+
while ((c = getopt(argc, argv, "c:os")) != -1)
{
		switch (c)
		{
			[...]
			case 's':			/* do semantic processing */
testsem = &sem;
sem.semstate = palloc(sizeof(struct DoState));
- ((struct DoState *) sem.semstate)->lex = &lex;
+ ((struct DoState *) sem.semstate)->lex = lex;
((struct DoState *) sem.semstate)->buf = makeStringInfo();
need_strings = true;
				break;
		}
	}
	[...]
		exit(1);
	}
- makeJsonLexContextIncremental(&lex, PG_UTF8, need_strings);
- setJsonLexContextOwnsTokens(&lex, lex_owns_tokens);
+ makeJsonLexContextIncremental(lex, PG_UTF8, need_strings);
+ setJsonLexContextOwnsTokens(lex, lex_owns_tokens);
initStringInfo(&json);
	if ((json_file = fopen(testfile, PG_BINARY_R)) == NULL)
		pg_fatal("error opening input: %m");
	[...]
		bytes_left -= n_read;
if (bytes_left > 0)
{
- result = pg_parse_json_incremental(&lex, testsem,
+ result = pg_parse_json_incremental(lex, testsem,
json.data, n_read,
false);
if (result != JSON_INCOMPLETE)
{
- fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+ fprintf(stderr, "%s\n", json_errdetail(result, lex));
ret = 1;
goto cleanup;
}
}
else
{
- result = pg_parse_json_incremental(&lex, testsem,
+ result = pg_parse_json_incremental(lex, testsem,
json.data, n_read,
true);
if (result != JSON_SUCCESS)
{
- fprintf(stderr, "%s\n", json_errdetail(result, &lex));
+ fprintf(stderr, "%s\n", json_errdetail(result, lex));
ret = 1;
goto cleanup;
			}
			[...]
		}
	}

cleanup:
fclose(json_file);
- freeJsonLexContext(&lex);
+ freeJsonLexContext(lex);
free(json.data);
+ free(lex);
return ret;
}
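Again for context and not part of the patch: a condensed sketch of the driver's incremental loop under the same assumptions. parse_stream(), its pre-chunked input, and the use of nullSemAction are all illustrative; the real driver reads a file in chunk_size pieces and may install semantic actions. Note the pairing the patch itself relies on: because the struct comes from a bare calloc() rather than from makeJsonLexContextIncremental(NULL, ...), freeJsonLexContext() releases only the internal state and the caller still owns the struct, hence the separate free(lex).

#include "postgres_fe.h"

#include "common/jsonapi.h"
#include "mb/pg_wchar.h"

/* Hypothetical helper: feed pre-chunked input to the incremental parser. */
static int
parse_stream(const char **chunks, const size_t *lens, int nchunks)
{
	JsonLexContext *lex;
	JsonParseErrorType result;
	int			ret = 0;

	/* caller-owned struct: freeJsonLexContext() will not free it */
	lex = calloc(1, sizeof(JsonLexContext));
	if (!lex)
		return 1;

	makeJsonLexContextIncremental(lex, PG_UTF8, false);

	for (int i = 0; i < nchunks; i++)
	{
		bool		is_last = (i == nchunks - 1);

		result = pg_parse_json_incremental(lex, &nullSemAction,
										   chunks[i], lens[i], is_last);

		/* every chunk but the last should report JSON_INCOMPLETE */
		if (result != (is_last ? JSON_SUCCESS : JSON_INCOMPLETE))
		{
			fprintf(stderr, "%s\n", json_errdetail(result, lex));
			ret = 1;
			break;
		}
	}

	freeJsonLexContext(lex);	/* internal state only */
	free(lex);					/* the struct itself */
	return ret;
}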