|
| 1 | +"""Benchmarks for pickle serialization of parsed queries. |
| 2 | +
|
| 3 | +This module benchmarks pickle serialization using a large query (~100KB) |
| 4 | +to provide realistic performance numbers for query caching use cases. |
| 5 | +""" |
| 6 | + |
| 7 | +import pickle |
| 8 | + |
| 9 | +from graphql import parse |
| 10 | + |
| 11 | +from ..fixtures import large_query # noqa: F401 |
| 12 | + |
| 13 | +# Parse benchmark |
| 14 | + |
| 15 | + |
def test_parse_large_query(benchmark, large_query):  # noqa: F811
    """Benchmark parsing of a large (~100KB) query document.

    Locations are disabled so the numbers reflect pure parse work.
    """

    def run_parse():
        return parse(large_query, no_location=True)

    document = benchmark(run_parse)
    assert document is not None
| 20 | + |
| 21 | + |
| 22 | +# Pickle benchmarks |
| 23 | + |
| 24 | + |
def test_pickle_large_query_roundtrip(benchmark, large_query):  # noqa: F811
    """Benchmark a full pickle dump/load cycle on a large query AST.

    The document is parsed once up front; only the serialization
    round trip itself is timed.
    """
    original = parse(large_query, no_location=True)

    def dump_and_load():
        return pickle.loads(pickle.dumps(original))

    restored = benchmark(dump_and_load)
    # The round-tripped AST must compare equal to the source document.
    assert restored == original
| 35 | + |
| 36 | + |
def test_pickle_large_query_encode(benchmark, large_query):  # noqa: F811
    """Benchmark pickle encoding (dumps only) of a large query AST.

    Parsing happens outside the timed section so only serialization
    cost is measured.
    """
    document = parse(large_query, no_location=True)

    def encode():
        return pickle.dumps(document)

    payload = benchmark(encode)
    assert isinstance(payload, bytes)
| 42 | + |
| 43 | + |
def test_pickle_large_query_decode(benchmark, large_query):  # noqa: F811
    """Benchmark pickle decoding (loads only) of a large query AST.

    The document is parsed and serialized once up front; only
    deserialization is timed.
    """
    document = parse(large_query, no_location=True)
    payload = pickle.dumps(document)

    def decode():
        return pickle.loads(payload)

    # The decoded AST must compare equal to the original document.
    assert benchmark(decode) == document
0 commit comments