Remove responses and return a list #19872

Merged
merged 8 commits on Jul 26, 2021
Changes from 4 commits
2 changes: 2 additions & 0 deletions sdk/monitor/azure-monitor-query/CHANGELOG.md
@@ -9,6 +9,8 @@
### Breaking Changes

- `aggregation` param in the query API is renamed to `aggregations`
- `batch_query` API now returns a list of responses.
- `LogsBatchResults` model is now removed.

### Bugs Fixed

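To illustrate the breaking change recorded above, here is a minimal before/after sketch; the query text and workspace id are placeholders, not taken from this PR.

```python
from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient, LogsQueryRequest

client = LogsQueryClient(DefaultAzureCredential())
requests = [
    LogsQueryRequest(query="AppRequests | take 5", workspace_id="<workspace-id>"),
]

# Before this PR: batch_query returned a LogsBatchResults wrapper
# results = client.batch_query(requests)
# for rsp in results.responses:
#     print(rsp.id)

# After this PR: batch_query returns a plain list of LogsQueryResult
results = client.batch_query(requests)
for rsp in results:
    print(rsp.id)
```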
4 changes: 2 additions & 2 deletions sdk/monitor/azure-monitor-query/README.md
@@ -172,8 +172,8 @@ requests = [
]
response = client.batch_query(requests)

for response in response.responses:
body = response.body
for rsp in response:
body = rsp.body
if not body.tables:
print("Something is wrong")
else:
@@ -15,7 +15,6 @@
MetricsResult,
LogsBatchResultError,
LogsQueryRequest,
LogsBatchResults,
MetricNamespace,
MetricDefinition,
MetricsMetadataValue,
@@ -30,7 +29,6 @@
__all__ = [
"AggregationType",
"LogsQueryClient",
"LogsBatchResults",
"LogsBatchResultError",
"LogsQueryResults",
"LogsQueryResultColumn",
@@ -11,8 +11,8 @@
from ._generated._monitor_query_client import MonitorQueryClient

from ._generated.models import BatchRequest, QueryBody as LogsQueryBody
from ._helpers import get_authentication_policy, process_error, construct_iso8601
from ._models import LogsQueryResults, LogsQueryRequest, LogsBatchResults
from ._helpers import get_authentication_policy, process_error, construct_iso8601, order_results
from ._models import LogsQueryResults, LogsQueryRequest, LogsQueryResult

if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
@@ -131,7 +131,7 @@ def query(self, workspace_id, query, duration=None, **kwargs):
process_error(e)

def batch_query(self, queries, **kwargs):
Member
Shouldn't it return a pageable object?

Contributor Author
No - the service doesn't support paging.

Member
I think our guideline is that even if the service does not support paging, we should wrap the return in a pageable result? @annatisch

Contributor Author
We talked to the architects, and they were fine with not supporting paging or LRO in this case, since the service is not planning to ever support it on these endpoints.

Contributor Author
I have created issue #19942 for the September milestone to follow up again.

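For context only: the reviewer's suggestion of wrapping a fixed result set in a pageable object could look roughly like the sketch below, using azure.core.paging.ItemPaged. The helper name is hypothetical, and this is not what the PR ends up doing.

```python
from azure.core.paging import ItemPaged

def _as_item_paged(results):
    # Hypothetical sketch: expose an already-materialized list as a
    # single-page pageable object. There is no server-side continuation.
    def get_next(continuation_token=None):
        return results if continuation_token is None else None

    def extract_data(page):
        # Returning None as the continuation token stops iteration after this page.
        return None, iter(page)

    return ItemPaged(get_next, extract_data)
```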
# type: (Union[Sequence[Dict], Sequence[LogsQueryRequest]], Any) -> LogsBatchResults
# type: (Union[Sequence[Dict], Sequence[LogsQueryRequest]], Any) -> Sequence[LogsQueryResult]
"""Execute a list of analytics queries. Each request can be either a LogQueryRequest
object or an equivalent serialized model.

@@ -140,7 +140,7 @@ def batch_query(self, queries, **kwargs):
:param queries: The list of queries that should be processed
:type queries: list[dict] or list[~azure.monitor.query.LogsQueryRequest]
:return: BatchResponse, or the result of cls(response)
:rtype: ~azure.monitor.query.LogsBatchResults
:rtype: ~list[~azure.monitor.query.LogsQueryResult]
:raises: ~azure.core.exceptions.HttpResponseError

.. admonition:: Example:
@@ -162,9 +162,11 @@
request_order = [req['id'] for req in queries]
batch = BatchRequest(requests=queries)
generated = self._query_op.batch(batch, **kwargs)
return LogsBatchResults._from_generated( # pylint: disable=protected-access
generated, request_order
)
return order_results(
request_order,
Member
What if the id in request_order does not exist in results?

Contributor Author
It will always exist - it's a 1:1 mapping.

[
LogsQueryResult._from_generated(rsp) for rsp in generated.responses # pylint: disable=protected-access
])

def close(self):
# type: () -> None
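The order_results helper imported above is not shown in this diff; given the 1:1 id mapping described in the review thread, a plausible sketch (names assumed, not the actual _helpers.py implementation) is:

```python
def order_results(request_order, responses):
    # Map each response back to its originating request id, then return the
    # responses in the same order the requests were submitted.
    mapping = {rsp.id: rsp for rsp in responses}
    return [mapping[request_id] for request_id in request_order]
```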
25 changes: 1 addition & 24 deletions sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py
@@ -9,7 +9,7 @@
import uuid
from typing import Any, Optional, List

from ._helpers import order_results, construct_iso8601
from ._helpers import construct_iso8601
from ._generated.models import (
Column as InternalColumn,
BatchQueryRequest as InternalLogQueryRequest,
@@ -234,29 +234,6 @@ def _from_generated(cls, generated):
body=LogsQueryResults._from_generated(generated.body) # pylint: disable=protected-access
)

class LogsBatchResults(object):
"""Response to a batch.

:keyword responses: An array of responses corresponding to each individual request in a batch.
:paramtype responses: list[azure.monitor.query.LogsQueryResult]
:keyword error: Error response for a batch request.
:paramtype error: ~azure.monitor.query.LogsBatchResultError
"""
def __init__(self, **kwargs):
# type: (Any) -> None
self.responses = kwargs.get("responses", None)
self.error = kwargs.get("error", None)

@classmethod
def _from_generated(cls, generated, request_order):
if not generated:
return cls()
return cls(
responses=order_results(request_order, [
LogsQueryResult._from_generated(rsp) for rsp in generated.responses # pylint: disable=protected-access
])
)


class LogsBatchResultError(object):
"""Error response for a batch request.
@@ -11,8 +11,8 @@
from .._generated.aio._monitor_query_client import MonitorQueryClient

from .._generated.models import BatchRequest, QueryBody as LogsQueryBody
from .._helpers import process_error, construct_iso8601
from .._models import LogsQueryResults, LogsQueryRequest, LogsBatchResults
from .._helpers import process_error, construct_iso8601, order_results
from .._models import LogsQueryResults, LogsQueryRequest, LogsQueryResult
from ._helpers_asyc import get_authentication_policy

if TYPE_CHECKING:
@@ -118,7 +118,7 @@ async def batch_query(
self,
queries: Union[Sequence[Dict], Sequence[LogsQueryRequest]],
**kwargs: Any
) -> LogsBatchResults:
) -> Sequence[LogsQueryResult]:
"""Execute a list of analytics queries. Each request can be either a LogQueryRequest
object or an equivalent serialized model.

@@ -127,7 +127,7 @@
:param queries: The list of queries that should be processed
:type queries: list[dict] or list[~azure.monitor.query.LogsQueryRequest]
:return: BatchResponse, or the result of cls(response)
:rtype: ~azure.monitor.query.LogsBatchResults
:rtype: ~list[~azure.monitor.query.LogsQueryResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
try:
@@ -139,9 +139,12 @@
except AttributeError:
request_order = [req['id'] for req in queries]
batch = BatchRequest(requests=queries)
return LogsBatchResults._from_generated( # pylint: disable=protected-access
await self._query_op.batch(batch, **kwargs), request_order
)
generated = await self._query_op.batch(batch, **kwargs)
return order_results(
request_order,
[
LogsQueryResult._from_generated(rsp) for rsp in generated.responses # pylint: disable=protected-access
])

async def __aenter__(self) -> "LogsQueryClient":
await self._client.__aenter__()
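A minimal usage sketch of the async client after this change, assuming the async LogsQueryClient is exposed under azure.monitor.query.aio per the usual SDK layout; the query text and workspace id are placeholders.

```python
import asyncio
import os

from azure.identity.aio import DefaultAzureCredential
from azure.monitor.query import LogsQueryRequest
from azure.monitor.query.aio import LogsQueryClient  # assumed module path

async def main():
    credential = DefaultAzureCredential()
    async with LogsQueryClient(credential) as client:
        requests = [
            LogsQueryRequest(
                query="AppRequests | take 5",
                workspace_id=os.environ["LOG_WORKSPACE_ID"],
            ),
        ]
        # batch_query now returns a plain list of LogsQueryResult
        responses = await client.batch_query(requests)
        for rsp in responses:
            print(rsp.id)
    await credential.close()

asyncio.run(main())
```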
@@ -27,14 +27,14 @@
workspace_id= os.environ['LOG_WORKSPACE_ID']
),
LogsQueryRequest(
query= "AppRequests",
query= "AppRequests | take 5",
workspace_id= os.environ['LOG_WORKSPACE_ID'],
include_statistics=True
),
]
response = client.batch_query(requests)
responses = client.batch_query(requests)

for response in response.responses:
for response in responses:
body = response.body
print(response.id)
if not body.tables:
@@ -0,0 +1,51 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import os
import pandas as pd
from datetime import datetime, timedelta
from msrest.serialization import UTC
from azure.monitor.query import LogsQueryClient
from azure.identity import DefaultAzureCredential

credential = DefaultAzureCredential()
client = LogsQueryClient(credential)

# Response time trend
# request duration over the last 12 hours.
query = """AppRequests |
summarize avgRequestDuration=avg(DurationMs) by bin(TimeGenerated, 10m), _ResourceId"""

end_time = datetime.now(UTC())

# returns LogsQueryResults
response = client.query(os.environ['LOG_WORKSPACE_ID'], query, duration=timedelta(days=1), end_time=end_time)

if not response.tables:
print("No results for the query")

for table in response.tables:
df = pd.DataFrame(table.rows, columns=[col.name for col in table.columns])
key_value = df.to_dict(orient='records')
print(key_value)

"""
[
{
'TimeGenerated': '2021-07-21T04:40:00Z',
'_ResourceId': '/subscriptions/faa080af....',
'avgRequestDuration': 19.7987
},
{
'TimeGenerated': '2021-07-21T04:50:00Z',
'_ResourceId': '/subscriptions/faa08....',
'avgRequestDuration': 33.9654
},
{
'TimeGenerated': '2021-07-21T05:00:00Z',
'_ResourceId': '/subscriptions/faa080....',
'avgRequestDuration': 44.13115
}
]
"""
@@ -62,7 +62,7 @@ async def test_logs_batch_query():
]
response = await client.batch_query(requests)

assert len(response.responses) == 3
assert len(response) == 3

@pytest.mark.skip('https://github.com/Azure/azure-sdk-for-python/issues/19382')
@pytest.mark.live_test_only
@@ -110,7 +110,7 @@ async def test_logs_batch_query_additional_workspaces():
]
response = await client.batch_query(requests)

assert len(response.responses) == 3
assert len(response) == 3

for resp in response.responses:
for resp in response:
assert len(resp.body.tables[0].rows) == 2
12 changes: 6 additions & 6 deletions sdk/monitor/azure-monitor-query/tests/test_logs_client.py
@@ -58,7 +58,7 @@ def test_logs_server_timeout():
"range x from 1 to 10000000000 step 1 | count",
server_timeout=1,
)
assert e.message.contains('Gateway timeout')
assert e.value.message.contains('Gateway timeout')

@pytest.mark.live_test_only
def test_logs_batch_query():
@@ -83,7 +83,7 @@ def test_logs_batch_query():
]
response = client.batch_query(requests)

assert len(response.responses) == 3
assert len(response) == 3

@pytest.mark.live_test_only
def test_logs_single_query_with_statistics():
@@ -121,9 +121,9 @@ def test_logs_batch_query_with_statistics_in_some():
]
response = client.batch_query(requests)

assert len(response.responses) == 3
assert response.responses[0].body.statistics is None
assert response.responses[2].body.statistics is not None
assert len(response) == 3
assert response[0].body.statistics is None
assert response[2].body.statistics is not None

@pytest.mark.skip('https://github.com/Azure/azure-sdk-for-python/issues/19382')
@pytest.mark.live_test_only
@@ -169,5 +169,5 @@ def test_logs_batch_query_additional_workspaces():
]
response = client.batch_query(requests)

for resp in response.responses:
for resp in response:
assert len(resp.body.tables[0].rows) == 2