Merge pull request #33 from dqops/1.6.0
1.6.0
dqops authored Jul 22, 2024
2 parents cef4901 + 82a16b2 commit 5b36adb
Showing 1,615 changed files with 25,392 additions and 12,110 deletions.
2 changes: 1 addition & 1 deletion .run/dqo run.run.xml
@@ -5,7 +5,7 @@
<option name="region" />
<option name="useCurrentConnection" value="false" />
</extension>
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.5.0.jar" />
<option name="JAR_PATH" value="$PROJECT_DIR$/dqops/target/dqo-dqops-1.6.0.jar" />
<option name="VM_PARAMETERS" value="-XX:MaxRAMPercentage=60.0 --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED" />
<option name="PROGRAM_PARAMETERS" value="--server.port=8888" />
<option name="WORKING_DIRECTORY" value="C:\dev\dqoado" />
13 changes: 6 additions & 7 deletions CHANGELOG.md
@@ -1,7 +1,6 @@
# 1.5.0
* Error sampling
* DuckDB 1.0.0 upgrade
* Global incident management screen
* Support for analyzing flat files on GCP
* Freshness anomaly check
* Filtering tables by name on the table import screen
# 1.6.0
* Fixes to some error sampling templates
* Redesigned data quality check editor to work in a simplified mode
* JDBC drivers are pre-loaded to avoid issues with automatic registration of JDBC drivers by Java
* Delta Lake and Iceberg support
* Global incident screen redesigned to show the counts of incidents
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.5.0
1.6.0
2 changes: 1 addition & 1 deletion distribution/pom.xml
@@ -11,7 +11,7 @@

<groupId>com.dqops</groupId>
<artifactId>dqo-distribution</artifactId>
<version>1.5.0</version> <!-- DQOps Version, do not touch (changed automatically) -->
<version>1.6.0</version> <!-- DQOps Version, do not touch (changed automatically) -->
<name>dqo-distribution</name>
<description>DQOps Data Quality Operations Center final assembly</description>
<packaging>pom</packaging>
@@ -19,6 +19,7 @@ def _get_kwargs(
acknowledged: Union[Unset, None, bool] = UNSET,
resolved: Union[Unset, None, bool] = UNSET,
muted: Union[Unset, None, bool] = UNSET,
severity: Union[Unset, None, int] = UNSET,
page: Union[Unset, None, int] = UNSET,
limit: Union[Unset, None, int] = UNSET,
filter_: Union[Unset, None, str] = UNSET,
@@ -41,6 +42,8 @@ def _get_kwargs(

params["muted"] = muted

params["severity"] = severity

params["page"] = page

params["limit"] = limit
@@ -112,6 +115,7 @@ def sync_detailed(
acknowledged: Union[Unset, None, bool] = UNSET,
resolved: Union[Unset, None, bool] = UNSET,
muted: Union[Unset, None, bool] = UNSET,
severity: Union[Unset, None, int] = UNSET,
page: Union[Unset, None, int] = UNSET,
limit: Union[Unset, None, int] = UNSET,
filter_: Union[Unset, None, str] = UNSET,
@@ -131,6 +135,7 @@ def sync_detailed(
acknowledged (Union[Unset, None, bool]):
resolved (Union[Unset, None, bool]):
muted (Union[Unset, None, bool]):
severity (Union[Unset, None, int]):
page (Union[Unset, None, int]):
limit (Union[Unset, None, int]):
filter_ (Union[Unset, None, str]):
@@ -154,6 +159,7 @@ def sync_detailed(
acknowledged=acknowledged,
resolved=resolved,
muted=muted,
severity=severity,
page=page,
limit=limit,
filter_=filter_,
@@ -179,6 +185,7 @@ def sync(
acknowledged: Union[Unset, None, bool] = UNSET,
resolved: Union[Unset, None, bool] = UNSET,
muted: Union[Unset, None, bool] = UNSET,
severity: Union[Unset, None, int] = UNSET,
page: Union[Unset, None, int] = UNSET,
limit: Union[Unset, None, int] = UNSET,
filter_: Union[Unset, None, str] = UNSET,
@@ -198,6 +205,7 @@ def sync(
acknowledged (Union[Unset, None, bool]):
resolved (Union[Unset, None, bool]):
muted (Union[Unset, None, bool]):
severity (Union[Unset, None, int]):
page (Union[Unset, None, int]):
limit (Union[Unset, None, int]):
filter_ (Union[Unset, None, str]):
@@ -222,6 +230,7 @@ def sync(
acknowledged=acknowledged,
resolved=resolved,
muted=muted,
severity=severity,
page=page,
limit=limit,
filter_=filter_,
@@ -241,6 +250,7 @@ async def asyncio_detailed(
acknowledged: Union[Unset, None, bool] = UNSET,
resolved: Union[Unset, None, bool] = UNSET,
muted: Union[Unset, None, bool] = UNSET,
severity: Union[Unset, None, int] = UNSET,
page: Union[Unset, None, int] = UNSET,
limit: Union[Unset, None, int] = UNSET,
filter_: Union[Unset, None, str] = UNSET,
@@ -260,6 +270,7 @@ async def asyncio_detailed(
acknowledged (Union[Unset, None, bool]):
resolved (Union[Unset, None, bool]):
muted (Union[Unset, None, bool]):
severity (Union[Unset, None, int]):
page (Union[Unset, None, int]):
limit (Union[Unset, None, int]):
filter_ (Union[Unset, None, str]):
@@ -283,6 +294,7 @@ async def asyncio_detailed(
acknowledged=acknowledged,
resolved=resolved,
muted=muted,
severity=severity,
page=page,
limit=limit,
filter_=filter_,
@@ -306,6 +318,7 @@ async def asyncio(
acknowledged: Union[Unset, None, bool] = UNSET,
resolved: Union[Unset, None, bool] = UNSET,
muted: Union[Unset, None, bool] = UNSET,
severity: Union[Unset, None, int] = UNSET,
page: Union[Unset, None, int] = UNSET,
limit: Union[Unset, None, int] = UNSET,
filter_: Union[Unset, None, str] = UNSET,
@@ -325,6 +338,7 @@ async def asyncio(
acknowledged (Union[Unset, None, bool]):
resolved (Union[Unset, None, bool]):
muted (Union[Unset, None, bool]):
severity (Union[Unset, None, int]):
page (Union[Unset, None, int]):
limit (Union[Unset, None, int]):
filter_ (Union[Unset, None, str]):
@@ -350,6 +364,7 @@ async def asyncio(
acknowledged=acknowledged,
resolved=resolved,
muted=muted,
severity=severity,
page=page,
limit=limit,
filter_=filter_,
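The change to this generated endpoint module adds a single `severity` query parameter, threaded through `_get_kwargs` and all four call variants (`sync_detailed`, `sync`, `asyncio_detailed`, `asyncio`). A minimal usage sketch follows; the import path, the client arguments, and the leading connection-name argument are assumptions for illustration, since the diff does not show the module name or the full function signature.

# Hypothetical call using the new `severity` filter added in 1.6.0.
# The import path, the AuthenticatedClient arguments, and the positional
# "my_connection" argument are assumptions -- the diff above does not show
# the module name or the full signature.
from dqops.client import AuthenticatedClient
from dqops.client.api.incidents import find_recent_incidents  # assumed module name

client = AuthenticatedClient(base_url="http://localhost:8888", token="YOUR_API_KEY")

incidents = find_recent_incidents.sync(
    "my_connection",   # assumed positional argument: the connection name
    client=client,
    severity=3,        # new in 1.6.0: filter the returned incidents by severity
    page=1,
    limit=50,
)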
@@ -0,0 +1,225 @@
from http import HTTPStatus
from typing import Any, Dict, Optional, Union

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.incident_status import IncidentStatus
from ...models.top_incident_grouping import TopIncidentGrouping
from ...models.top_incidents_model import TopIncidentsModel
from ...types import UNSET, Response, Unset


def _get_kwargs(
*,
status: Union[Unset, None, IncidentStatus] = UNSET,
group_by: Union[Unset, None, TopIncidentGrouping] = UNSET,
limit: Union[Unset, None, int] = UNSET,
days: Union[Unset, None, int] = UNSET,
) -> Dict[str, Any]:

pass

params: Dict[str, Any] = {}
json_status: Union[Unset, None, str] = UNSET
if not isinstance(status, Unset):
json_status = status.value if status else None

params["status"] = json_status

json_group_by: Union[Unset, None, str] = UNSET
if not isinstance(group_by, Unset):
json_group_by = group_by.value if group_by else None

params["groupBy"] = json_group_by

params["limit"] = limit

params["days"] = days

params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

return {
"method": "get",
"url": "api/topincidents",
"params": params,
}


def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[TopIncidentsModel]:
if response.status_code == HTTPStatus.OK:
response_200 = TopIncidentsModel.from_dict(response.json())

return response_200
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None


def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[TopIncidentsModel]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
headers=response.headers,
parsed=_parse_response(client=client, response=response),
)


def sync_detailed(
*,
client: AuthenticatedClient,
status: Union[Unset, None, IncidentStatus] = UNSET,
group_by: Union[Unset, None, TopIncidentGrouping] = UNSET,
limit: Union[Unset, None, int] = UNSET,
days: Union[Unset, None, int] = UNSET,
) -> Response[TopIncidentsModel]:
"""findTopIncidentsGrouped
Finds the most recent incidents grouped by one of the incident's attributes, such as a data quality
dimension, a data quality check category, or the connection name.
Args:
status (Union[Unset, None, IncidentStatus]):
group_by (Union[Unset, None, TopIncidentGrouping]):
limit (Union[Unset, None, int]):
days (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[TopIncidentsModel]
"""

kwargs = _get_kwargs(
status=status,
group_by=group_by,
limit=limit,
days=days,
)

response = client.get_httpx_client().request(
**kwargs,
)

return _build_response(client=client, response=response)


def sync(
*,
client: AuthenticatedClient,
status: Union[Unset, None, IncidentStatus] = UNSET,
group_by: Union[Unset, None, TopIncidentGrouping] = UNSET,
limit: Union[Unset, None, int] = UNSET,
days: Union[Unset, None, int] = UNSET,
) -> Optional[TopIncidentsModel]:
"""findTopIncidentsGrouped
Finds the most recent incidents grouped by one of the incident's attributes, such as a data quality
dimension, a data quality check category, or the connection name.
Args:
status (Union[Unset, None, IncidentStatus]):
group_by (Union[Unset, None, TopIncidentGrouping]):
limit (Union[Unset, None, int]):
days (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
TopIncidentsModel
"""

return sync_detailed(
client=client,
status=status,
group_by=group_by,
limit=limit,
days=days,
).parsed


async def asyncio_detailed(
*,
client: AuthenticatedClient,
status: Union[Unset, None, IncidentStatus] = UNSET,
group_by: Union[Unset, None, TopIncidentGrouping] = UNSET,
limit: Union[Unset, None, int] = UNSET,
days: Union[Unset, None, int] = UNSET,
) -> Response[TopIncidentsModel]:
"""findTopIncidentsGrouped
Finds the most recent incidents grouped by one of the incident's attributes, such as a data quality
dimension, a data quality check category, or the connection name.
Args:
status (Union[Unset, None, IncidentStatus]):
group_by (Union[Unset, None, TopIncidentGrouping]):
limit (Union[Unset, None, int]):
days (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[TopIncidentsModel]
"""

kwargs = _get_kwargs(
status=status,
group_by=group_by,
limit=limit,
days=days,
)

response = await client.get_async_httpx_client().request(**kwargs)

return _build_response(client=client, response=response)


async def asyncio(
*,
client: AuthenticatedClient,
status: Union[Unset, None, IncidentStatus] = UNSET,
group_by: Union[Unset, None, TopIncidentGrouping] = UNSET,
limit: Union[Unset, None, int] = UNSET,
days: Union[Unset, None, int] = UNSET,
) -> Optional[TopIncidentsModel]:
"""findTopIncidentsGrouped
Finds the most recent incidents grouped by one of the incident's attributes, such as a data quality
dimension, a data quality check category, or the connection name.
Args:
status (Union[Unset, None, IncidentStatus]):
group_by (Union[Unset, None, TopIncidentGrouping]):
limit (Union[Unset, None, int]):
days (Union[Unset, None, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
TopIncidentsModel
"""

return (
await asyncio_detailed(
client=client,
status=status,
group_by=group_by,
limit=limit,
days=days,
)
).parsed
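The new module above generates the client for the findTopIncidentsGrouped operation (GET api/topincidents). A short usage sketch follows; the module name and the enum member names are assumptions for illustration, while the imports of `AuthenticatedClient`, `IncidentStatus`, and `TopIncidentGrouping` mirror the generated file.

# Hypothetical call to the new findTopIncidentsGrouped endpoint added in 1.6.0.
# The module name and enum member names are assumptions; the parameters
# (status, group_by, limit, days) and the return type come from the generated
# code above.
from dqops.client import AuthenticatedClient
from dqops.client.api.incidents import find_top_incidents_grouped  # assumed module name
from dqops.client.models.incident_status import IncidentStatus
from dqops.client.models.top_incident_grouping import TopIncidentGrouping

client = AuthenticatedClient(base_url="http://localhost:8888", token="YOUR_API_KEY")

top_incidents = find_top_incidents_grouped.sync(
    client=client,
    status=IncidentStatus.OPEN,              # assumed enum member
    group_by=TopIncidentGrouping.DIMENSION,  # assumed enum member
    limit=10,   # at most 10 incidents per group
    days=7,     # look back over the last 7 days
)
# Returns a TopIncidentsModel, or None when the server responds with an
# unexpected status and raise_on_unexpected_status is False.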