Skip to content

Commit

Permalink
use class method get_explain_metrics() instead of property explain_metrics
Browse files Browse the repository at this point in the history
  • Loading branch information
Linchin committed Aug 19, 2024
1 parent 9bf8b00 commit 7ae1028
Show file tree
Hide file tree
Showing 12 changed files with 114 additions and 90 deletions.
2 changes: 1 addition & 1 deletion google/cloud/firestore_v1/aggregation.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def get(
if explain_options is None:
explain_metrics = None
else:
explain_metrics = result.explain_metrics
explain_metrics = result.get_explain_metrics()

return QueryResultsList(result_list, explain_options, explain_metrics)

Expand Down
2 changes: 1 addition & 1 deletion google/cloud/firestore_v1/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def get(
if explain_options is None:
explain_metrics = None
else:
explain_metrics = result.explain_metrics
explain_metrics = result.get_explain_metrics()

return QueryResultsList(result_list, explain_options, explain_metrics)

Expand Down
16 changes: 13 additions & 3 deletions google/cloud/firestore_v1/query_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,11 +53,21 @@ def __init__(
self._explain_metrics = explain_metrics

@property
def explain_options(self):
def explain_options(self) -> Optional[ExplainOptions]:
"""Query profiling options for getting these query results."""
return self._explain_options

@property
def explain_metrics(self):
def get_explain_metrics(self) -> ExplainMetrics:
"""
Get the metrics associated with the query execution.
Metrics are only available when explain_options is set on the query. If
ExplainOptions.analyze is False, only plan_summary is available. If it is
True, execution_stats is also available.
:rtype: :class:`~google.cloud.firestore_v1.query_profile.ExplainMetrics`
:returns: The metrics associated with the query execution.
:raises: :class:`~google.cloud.firestore_v1.query_profile.QueryExplainError`
if explain_metrics is not available on the query.
"""
if self._explain_options is None:
raise QueryExplainError("explain_options not set on query.")
else:
Expand Down
3 changes: 1 addition & 2 deletions google/cloud/firestore_v1/stream_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,7 @@ def explain_options(self) -> ExplainOptions | None:
"""Query profiling options for this stream request."""
return self._explain_options

@property
def explain_metrics(self) -> ExplainMetrics:
def get_explain_metrics(self) -> ExplainMetrics:
"""
Get the metrics associated with the query execution.
Metrics are only available when explain_options is set on the query. If
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/firestore_v1/vector_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def get(
if explain_options is None:
explain_metrics = None
else:
explain_metrics = result.explain_metrics
explain_metrics = result.get_explain_metrics()

return QueryResultsList(result_list, explain_options, explain_metrics)

Expand Down
100 changes: 53 additions & 47 deletions tests/system/test_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def test_collection_stream_or_get_w_no_explain_options(database, query_docs, met
QueryExplainError,
match="explain_options not set on query.",
):
results.explain_metrics
results.get_explain_metrics()


@pytest.mark.skipif(
Expand All @@ -142,10 +142,10 @@ def test_collection_stream_or_get_w_explain_options_analyze_false(
method_under_test = getattr(collection, method)
results = method_under_test(explain_options=ExplainOptions(analyze=False))

assert isinstance(results.explain_metrics, ExplainMetrics)

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert plan_summary.indexes_used[0]["properties"] == "(__name__ ASC)"
Expand All @@ -156,7 +156,7 @@ def test_collection_stream_or_get_w_explain_options_analyze_false(
QueryExplainError,
match="execution_stats not available when explain_options.analyze=False",
):
results.explain_metrics.execution_stats
explain_metrics.execution_stats


@pytest.mark.skipif(
Expand Down Expand Up @@ -188,21 +188,22 @@ def test_collection_stream_or_get_w_explain_options_analyze_true(
QueryExplainError,
match="explain_metrics not available until query is complete",
):
results.explain_metrics
results.get_explain_metrics()

# Finish iterating results, and explain_metrics should be available.
num_results = len(list(results))
assert isinstance(results.explain_metrics, ExplainMetrics)

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert plan_summary.indexes_used[0]["properties"] == "(__name__ ASC)"
assert plan_summary.indexes_used[0]["query_scope"] == "Collection"

# Verify execution_stats.
execution_stats = results.explain_metrics.execution_stats
execution_stats = explain_metrics.execution_stats
assert isinstance(execution_stats, ExecutionStats)
assert execution_stats.results_returned == num_results
assert execution_stats.read_operations == num_results
Expand Down Expand Up @@ -406,7 +407,7 @@ def test_vector_query_stream_or_get_w_no_explain_options(client, database, metho
QueryExplainError,
match="explain_options not set on query.",
):
results.explain_metrics
results.get_explain_metrics()


@pytest.mark.skipif(
Expand Down Expand Up @@ -446,14 +447,15 @@ def test_vector_query_stream_or_get_w_explain_options_analyze_true(
QueryExplainError,
match="explain_metrics not available until query is complete",
):
results.explain_metrics
results.get_explain_metrics()

# Finish iterating results, and explain_metrics should be available.
num_results = len(list(results))
assert isinstance(results.explain_metrics, ExplainMetrics)

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert (
Expand All @@ -463,7 +465,7 @@ def test_vector_query_stream_or_get_w_explain_options_analyze_true(
assert plan_summary.indexes_used[0]["query_scope"] == "Collection group"

# Verify execution_stats.
execution_stats = results.explain_metrics.execution_stats
execution_stats = explain_metrics.execution_stats
assert isinstance(execution_stats, ExecutionStats)
assert execution_stats.results_returned == num_results
assert execution_stats.read_operations > 0
Expand Down Expand Up @@ -505,12 +507,13 @@ def test_vector_query_stream_or_get_w_explain_options_analyze_false(
method_under_test = getattr(vector_query, method)
results = method_under_test(explain_options=ExplainOptions(analyze=False))

assert isinstance(results.explain_metrics, ExplainMetrics)
results_list = list(results)
assert len(results_list) == 0

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert (
Expand All @@ -524,7 +527,7 @@ def test_vector_query_stream_or_get_w_explain_options_analyze_false(
QueryExplainError,
match="execution_stats not available when explain_options.analyze=False",
):
results.explain_metrics.execution_stats
explain_metrics.execution_stats


@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
Expand Down Expand Up @@ -1188,7 +1191,7 @@ def test_query_stream_or_get_w_no_explain_options(query_docs, database, method):
# If no explain_option is passed, raise an exception if explain_metrics
# is called
with pytest.raises(QueryExplainError, match="explain_options not set on query"):
results.explain_metrics
results.get_explain_metrics()


@pytest.mark.skipif(
Expand Down Expand Up @@ -1222,21 +1225,22 @@ def test_query_stream_or_get_w_explain_options_analyze_true(
QueryExplainError,
match="explain_metrics not available until query is complete",
):
results.explain_metrics
results.get_explain_metrics()

# Finish iterating results, and explain_metrics should be available.
num_results = len(list(results))
assert isinstance(results.explain_metrics, ExplainMetrics)

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert plan_summary.indexes_used[0]["properties"] == "(a ASC, __name__ ASC)"
assert plan_summary.indexes_used[0]["query_scope"] == "Collection"

# Verify execution_stats.
execution_stats = results.explain_metrics.execution_stats
execution_stats = explain_metrics.execution_stats
assert isinstance(execution_stats, ExecutionStats)
assert execution_stats.results_returned == num_results
assert execution_stats.read_operations == num_results
Expand Down Expand Up @@ -1273,13 +1277,13 @@ def test_query_stream_or_get_w_explain_options_analyze_false(
method_under_test = getattr(query, method)
results = method_under_test(explain_options=ExplainOptions(analyze=False))

assert isinstance(results.explain_metrics, ExplainMetrics)

results_list = list(results)
assert len(results_list) == 0

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert plan_summary.indexes_used[0]["properties"] == "(a ASC, __name__ ASC)"
Expand All @@ -1290,7 +1294,7 @@ def test_query_stream_or_get_w_explain_options_analyze_false(
QueryExplainError,
match="execution_stats not available when explain_options.analyze=False",
):
results.explain_metrics.execution_stats
explain_metrics.execution_stats


@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
Expand Down Expand Up @@ -2688,7 +2692,7 @@ def test_aggregation_query_stream_or_get_w_no_explain_options(query, database, m
# If no explain_option is passed, raise an exception if explain_metrics
# is called
with pytest.raises(QueryExplainError, match="explain_options not set on query"):
results.explain_metrics
results.get_explain_metrics()


@pytest.mark.skipif(
Expand Down Expand Up @@ -2726,21 +2730,22 @@ def test_aggregation_query_stream_or_get_w_explain_options_analyze_true(
QueryExplainError,
match="explain_metrics not available until query is complete",
):
results.explain_metrics
results.get_explain_metrics()

# Finish iterating results, and explain_metrics should be available.
num_results = len(list(results))
assert isinstance(results.explain_metrics, ExplainMetrics)

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert plan_summary.indexes_used[0]["properties"] == "(a ASC, __name__ ASC)"
assert plan_summary.indexes_used[0]["query_scope"] == "Collection"

# Verify execution_stats.
execution_stats = results.explain_metrics.execution_stats
execution_stats = explain_metrics.execution_stats
assert isinstance(execution_stats, ExecutionStats)
assert execution_stats.results_returned == num_results
assert execution_stats.read_operations == num_results
Expand Down Expand Up @@ -2781,10 +2786,10 @@ def test_aggregation_query_stream_or_get_w_explain_options_analyze_false(
method_under_test = getattr(count_query, method)
results = method_under_test(explain_options=ExplainOptions(analyze=False))

assert isinstance(results.explain_metrics, ExplainMetrics)

# Verify plan_summary.
plan_summary = results.explain_metrics.plan_summary
# Verify explain_metrics and plan_summary.
explain_metrics = results.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
plan_summary = explain_metrics.plan_summary
assert isinstance(plan_summary, PlanSummary)
assert len(plan_summary.indexes_used) > 0
assert plan_summary.indexes_used[0]["properties"] == "(a ASC, __name__ ASC)"
Expand All @@ -2795,7 +2800,7 @@ def test_aggregation_query_stream_or_get_w_explain_options_analyze_false(
QueryExplainError,
match="execution_stats not available when explain_options.analyze=False",
):
results.explain_metrics.execution_stats
explain_metrics.execution_stats


@pytest.mark.parametrize("database", [None, FIRESTORE_OTHER_DB], indirect=True)
Expand Down Expand Up @@ -3009,15 +3014,16 @@ def in_transaction(transaction):
with pytest.raises(
QueryExplainError, match="explain_options not set on query."
):
result_1.explain_metrics
result_1.get_explain_metrics()

result_2 = query.get(
transaction=transaction,
explain_options=ExplainOptions(analyze=True),
)
assert isinstance(result_2.explain_metrics, ExplainMetrics)
assert result_2.explain_metrics.plan_summary is not None
assert result_2.explain_metrics.execution_stats is not None
explain_metrics = result_2.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
assert explain_metrics.plan_summary is not None
assert explain_metrics.execution_stats is not None

inner_fn_ran = True

Expand Down
14 changes: 8 additions & 6 deletions tests/unit/v1/test_aggregation.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,10 +443,11 @@ def _aggregation_query_get_helper(

if explain_options is None:
with pytest.raises(QueryExplainError, match="explain_options not set"):
returned.explain_metrics
returned.get_explain_metrics()
else:
assert isinstance(returned.explain_metrics, ExplainMetrics)
assert returned.explain_metrics.execution_stats.results_returned == 1
actual_explain_metrics = returned.get_explain_metrics()
assert isinstance(actual_explain_metrics, ExplainMetrics)
assert actual_explain_metrics.execution_stats.results_returned == 1

parent_path, _ = parent._parent_info()
expected_request = {
Expand Down Expand Up @@ -728,10 +729,11 @@ def _aggregation_query_stream_helper(

if explain_options is None:
with pytest.raises(QueryExplainError, match="explain_options not set"):
returned.explain_metrics
returned.get_explain_metrics()
else:
assert isinstance(returned.explain_metrics, ExplainMetrics)
assert returned.explain_metrics.execution_stats.results_returned == 1
explain_metrics = returned.get_explain_metrics()
assert isinstance(explain_metrics, ExplainMetrics)
assert explain_metrics.execution_stats.results_returned == 1

parent_path, _ = parent._parent_info()
expected_request = {
Expand Down
4 changes: 2 additions & 2 deletions tests/unit/v1/test_base_document.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,7 +433,7 @@ def test_query_results_list_explain_metrics_w_explain_options():
explain_metrics=explain_metrics,
)

assert snapshot_list.explain_metrics == explain_metrics
assert snapshot_list.get_explain_metrics() == explain_metrics


def test_query_results_list_explain_metrics_wo_explain_options():
Expand All @@ -442,7 +442,7 @@ def test_query_results_list_explain_metrics_wo_explain_options():
snapshot_list = _make_query_results_list([])

with pytest.raises(QueryExplainError):
snapshot_list.explain_metrics
snapshot_list.get_explain_metrics()


def test__get_document_path():
Expand Down
Loading

0 comments on commit 7ae1028

Please sign in to comment.