Skip to content

Commit 0658056

Browse files
committed
Merge remote-tracking branch 'upstream/mvp_demo' into bulk-validations-test
# Conflicts: # tests/scripts/helpers/utils.py
2 parents 366bc5a + 81a6178 commit 0658056

File tree

10 files changed

+645
-13
lines changed

10 files changed

+645
-13
lines changed

Dockerfile.autotune

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,12 +16,12 @@
1616
##########################################################
1717
# Build Docker Image
1818
##########################################################
19-
FROM registry.access.redhat.com/ubi9/ubi-minimal:9.6-1760515502 as mvnbuild-jdk21
19+
FROM registry.access.redhat.com/ubi10/ubi-minimal:10.1-1764604111 as mvnbuild-jdk25
2020
ARG USER=autotune
2121
ARG AUTOTUNE_HOME=/home/$USER
2222

23-
RUN microdnf --setopt=install_weak_deps=0 --setopt=tsflags=nodocs install -y java-21-openjdk-devel \
24-
tar gzip java-21-openjdk-jmods binutils git vim \
23+
RUN microdnf --setopt=install_weak_deps=0 --setopt=tsflags=nodocs install -y java-25-openjdk-devel \
24+
tar gzip java-25-openjdk-jmods binutils git vim \
2525
&& microdnf clean all
2626

2727
RUN mkdir -p /usr/share/maven /usr/share/maven/ref \
@@ -48,7 +48,7 @@ RUN jlink --strip-debug --compress 2 --no-header-files --no-man-pages --module-p
4848
# Runtime Docker Image
4949
##########################################################
5050
# Use ubi-minimal as the base image
51-
FROM registry.access.redhat.com/ubi9/ubi-minimal:9.6-1760515502
51+
FROM registry.access.redhat.com/ubi10/ubi-minimal:10.1-1764604111
5252

5353
ARG AUTOTUNE_VERSION
5454
ARG USER=autotune
@@ -85,9 +85,9 @@ RUN microdnf -y install shadow-utils \
8585
USER ${UID}
8686

8787
# Copy the jlinked JRE
88-
COPY --chown=${UID}:0 --from=mvnbuild-jdk21 ${AUTOTUNE_HOME}/src/autotune/jre/ ${AUTOTUNE_HOME}/app/jre/
88+
COPY --chown=${UID}:0 --from=mvnbuild-jdk25 ${AUTOTUNE_HOME}/src/autotune/jre/ ${AUTOTUNE_HOME}/app/jre/
8989
# Copy the app binaries
90-
COPY --chown=${UID}:0 --from=mvnbuild-jdk21 ${AUTOTUNE_HOME}/src/autotune/target/ ${AUTOTUNE_HOME}/app/target/
90+
COPY --chown=${UID}:0 --from=mvnbuild-jdk25 ${AUTOTUNE_HOME}/src/autotune/target/ ${AUTOTUNE_HOME}/app/target/
9191

9292
# Copy the metric and metadata profile JSON file path into the runtime image
9393
COPY manifests/autotune/performance-profiles/resource_optimization_local_monitoring.json ${AUTOTUNE_HOME}/app/manifests/autotune/performance-profiles/resource_optimization_local_monitoring.json

design/PerformanceProfileAPI.md

Lines changed: 39 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -311,15 +311,14 @@ List performance profiles output JSON as follows.
311311
]
312312
```
313313

314-
315314
## UpdatePerformanceProfile
316315

317316
This is a quick guide with instructions to update a performance profile using the input JSON as follows.
318317

319318
**Request**
320-
`POST /updatePerformanceProfile`
319+
`PUT /updatePerformanceProfile`
321320

322-
`curl -H 'Accept: application/json' -X POST --data 'copy paste below JSON' http://<URL>:<PORT>/updatePerformanceProfile`
321+
`curl -H 'Accept: application/json' -X PUT --data 'copy paste below JSON' http://<URL>:<PORT>/updatePerformanceProfile`
323322

324323
```
325324
{
@@ -761,3 +760,40 @@ This is quick guide instructions to update performance profile using input JSON
761760
}
762761
```
763762
#### Note: One of query or aggregation_functions is mandatory. Both can be present together.
763+
764+
765+
## DeletePerformanceProfile
766+
767+
This is a quick guide with instructions to delete a performance profile using the input parameter as follows.
768+
769+
**Request Parameters**
770+
771+
| Parameter | Type | Required | Description |
772+
|-----------|--------|----------|-------------------------------------|
773+
| name | string | required | The name of the performance profile |
774+
775+
776+
**Request with name query parameter**
777+
778+
`DELETE /deletePerformanceProfile`
779+
780+
`curl -H 'Accept: application/json' -X DELETE http://<URL>:<PORT>/deletePerformanceProfile?name=resource-optimization-openshift`
781+
782+
Deletes the specified performance profile, provided it is already created
783+
784+
<details>
785+
<summary><b>Response</b></summary>
786+
787+
788+
```json
789+
{
790+
"message": "Performance profile resource-optimization-openshift deleted successfully. View Performance Profiles at /listPerformanceProfiles",
791+
"httpcode": 201,
792+
"documentationLink": "",
793+
"status": "SUCCESS"
794+
}
795+
```
796+
797+
</details>
798+
799+
<br>

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
<org-json-version>20240303</org-json-version>
1313
<jetty-version>12.0.12</jetty-version>
1414
<slf4j-version>2.17.1</slf4j-version>
15-
<java-version>17</java-version>
15+
<java-version>25</java-version>
1616
<prometheus-simpleclient>0.14.1</prometheus-simpleclient>
1717
<gson-version>2.9.0</gson-version>
1818
<maven-compiler-plugin-version>3.8.0</maven-compiler-plugin-version>

tests/scripts/helpers/kruize.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -253,8 +253,11 @@ def delete_performance_profile(input_json_file, invalid_header=False):
253253
print("\nDeleting the performance profile...")
254254
url = URL + "/deletePerformanceProfile"
255255

256-
performance_profile_name = input_json['name']
257-
query_string = f"name={performance_profile_name}"
256+
try:
257+
performance_profile_name = input_json['name']
258+
query_string = f"name={performance_profile_name}"
259+
except KeyError:
260+
query_string = ""
258261

259262
if query_string:
260263
url += "?" + query_string

tests/scripts/helpers/utils.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -112,11 +112,16 @@
112112
COST_LIMITS_CPU_NO_RECOMMENDATIONS_MSG = "CPU recommendations missing"
113113
COST_LIMITS_MEM_NO_RECOMMENDATIONS_MSG = "Memory recommendations missing"
114114
CREATE_PERF_PROFILE_SUCCESS_MSG = "Performance Profile : %s created successfully."
115+
CREATE_PERF_PROFILE_DUPLICATE_RECORD_MSG = "Validation failed: Performance Profile already exists: %s"
115116
UPDATE_PERF_PROFILE_SUCCESS_MSG = "Performance Profile '%s' updated successfully to version %s. View Performance Profiles at /listPerformanceProfiles"
116117
UPDATE_PERF_PROFILE_MISSING_PROFILE_ERROR_MSG = "Validation failed: Performance Profile '%s' not found. Use POST to create a new profile."
117118
UPDATE_PERF_PROFILE_ALREADY_UPDATED_MSG = "Validation failed: Performance profile '%s' already updated with the version %.1f"
118119
UPDATE_PERF_PROFILE_SLO_ALREADY_UPDATED_MSG = "Validation failed: Performance profile '%s' already updated with the provided SLO data"
119120
UPDATE_PERF_PROFILE_SUPERSET_ERROR = "Validation failed: Updated profile must be a superset of existing data"
121+
DELETE_PERF_PROFILE_SUCCESS_MSG = "Performance profile %s deleted successfully. View Performance Profiles at /listPerformanceProfiles"
122+
DELETE_PERF_PROFILE_MISSING_NAME_ERROR = "Performance profile name is required."
123+
DELETE_PERF_PROFILE_NON_EXISTENT_NAME_ERROR = "Not Found: performance_profile does not exist: %s"
124+
DELETE_PERF_PROFILE_EXPERIMENT_ASSOCIATION_ERROR = "Performance Profile '%s' cannot be deleted as it is currently associated with %d experiment."
120125
DATASOURCE_NOT_SERVICEABLE = "Datasource is not serviceable."
121126

122127

tests/scripts/remote_monitoring_tests/Remote_monitoring_tests.md

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -147,6 +147,20 @@ The above tests are developed using pytest framework and the tests are run using
147147
- Creates a resource optimization performance profile using the [createPerformanceProfile API](/design/PerformanceProfileAPI.md)
148148
- Runs the above tests using pytest
149149

150+
### **Create Performance Profile API tests**
151+
152+
Here are the test scenarios:
153+
- Create performance profile with a valid version
154+
- Create performance profile with duplicate data
155+
- Create performance profile with missing mandatory fields
156+
157+
### **List Performance Profile API tests**
158+
159+
Here are the test scenarios:
160+
- List performance profile with no profiles
161+
- List performance profile with only one profile present
162+
- List performance profile with multiple profiles present
163+
150164
### **Update Performance Profile API tests**
151165

152166
Here are the test scenarios:
@@ -157,6 +171,13 @@ Here are the test scenarios:
157171
- Update performance profile with invalid superset data
158172
- Update performance profile with missing mandatory fields
159173

174+
### **Delete Performance Profile API tests**
175+
176+
Here are the test scenarios:
177+
- Delete performance profile with a valid version
178+
- Delete performance profile with invalid profile name scenarios
179+
- Delete performance profile with when its associated with existing experiments
180+
160181

161182
## Prerequisites for running the tests:
162183
- Minikube setup or access to Openshift cluster
Lines changed: 203 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,203 @@
1+
"""
2+
Copyright (c) 2025 IBM Corporation and others.
3+
4+
Licensed under the Apache License, Version 2.0 (the "License");
5+
you may not use this file except in compliance with the License.
6+
You may obtain a copy of the License at
7+
8+
http://www.apache.org/licenses/LICENSE-2.0
9+
10+
Unless required by applicable law or agreed to in writing, software
11+
distributed under the License is distributed on an "AS IS" BASIS,
12+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
See the License for the specific language governing permissions and
14+
limitations under the License.
15+
"""
16+
import tempfile
17+
18+
import pytest
19+
import sys
20+
21+
22+
from helpers.list_metric_profiles_validate import *
23+
sys.path.append("../../")
24+
25+
from helpers.fixtures import *
26+
from helpers.utils import *
27+
28+
perf_profile_dir = get_metric_profile_dir()

# Every mandatory field is expected to yield the same validation failure
# (ERROR_STATUS_CODE / ERROR_STATUS) when it is missing from the input JSON,
# so build the parametrize tuples from a flat field list instead of repeating
# the status pair thirteen times.
mandatory_fields = [
    (field, ERROR_STATUS_CODE, ERROR_STATUS)
    for field in (
        "name",
        "profile_version",
        "sloInfo",
        "direction",
        "objective_function",
        "function_type",
        "function_variables",
        "metric_name",
        "datasource",
        "value_type",
        "aggregation_functions",
        "function",
        "query",
    )
]
44+
45+
@pytest.mark.perf_profile
def test_create_performance_profile(cluster_type):
    """
    Test Description: This test validates the response status code of createPerformanceProfile API by passing a
    valid input for the json
    """
    # Point the helper layer at the kruize instance for this cluster type.
    form_kruize_url(cluster_type)
    profile_file = perf_profile_dir / 'resource_optimization_openshift.json'

    # Start from a clean slate: remove any profile left over from earlier runs.
    cleanup_resp = delete_performance_profile(profile_file)
    print("delete API status code = ", cleanup_resp.status_code)
    print("delete API status message = ", cleanup_resp.json()["message"])

    # Create the performance profile from the reference JSON.
    create_resp = create_performance_profile(profile_file)
    create_body = create_resp.json()
    print(create_body['message'])

    # Pull the expected name/version straight from the input document.
    with open(profile_file, "r") as fh:
        expected = json.load(fh)
    expected_name = expected["name"]
    expected_version = expected["profile_version"]

    assert create_resp.status_code == SUCCESS_STATUS_CODE
    assert create_body['status'] == SUCCESS_STATUS
    assert CREATE_PERF_PROFILE_SUCCESS_MSG % expected_name in create_body['message']

    # Cross-check via listPerformanceProfiles: the stored version must match
    # the input, and the payload must conform to the shared list schema.
    listed = list_performance_profiles().json()
    assert listed[0]["profile_version"] == expected_version
    assert validate_list_metric_profiles_json(listed, list_metric_profiles_schema) == ""

    # Tidy up so later tests are unaffected.
    final_resp = delete_performance_profile(profile_file)
    print("delete performance profile = ", final_resp.status_code)
86+
87+
88+
@pytest.mark.perf_profile
def test_create_performance_profile_with_duplicate_data(cluster_type):
    """
    Test Description: This test validates the response message of createPerformanceProfile API by passing the same data twice
    """
    # Point the helper layer at the kruize instance for this cluster type.
    form_kruize_url(cluster_type)
    profile_file = perf_profile_dir / 'resource_optimization_openshift.json'

    # Remove any pre-existing profile so the first create is guaranteed to succeed.
    cleanup_resp = delete_performance_profile(profile_file)
    print("delete API status code = ", cleanup_resp.status_code)
    print("delete API status message = ", cleanup_resp.json()["message"])

    # First create: must succeed.
    first_resp = create_performance_profile(profile_file)
    first_body = first_resp.json()
    print(first_body['message'])

    # Expected name/version come straight from the input document.
    with open(profile_file, "r") as fh:
        expected = json.load(fh)
    expected_name = expected["name"]
    expected_version = expected["profile_version"]

    assert first_resp.status_code == SUCCESS_STATUS_CODE
    assert first_body['status'] == SUCCESS_STATUS
    assert CREATE_PERF_PROFILE_SUCCESS_MSG % expected_name in first_body['message']

    # The listed profile must carry the version from the input JSON.
    assert list_performance_profiles().json()[0]["profile_version"] == expected_version

    # Second create with identical data: must be rejected as a duplicate.
    second_resp = create_performance_profile(profile_file)
    second_body = second_resp.json()
    print(second_body['message'])

    assert second_resp.status_code == ERROR_409_STATUS_CODE
    assert second_body['status'] == ERROR_STATUS
    assert second_body['message'] == CREATE_PERF_PROFILE_DUPLICATE_RECORD_MSG % expected_name

    # Clean up for subsequent tests.
    final_resp = delete_performance_profile(profile_file)
    print("delete performance profile = ", final_resp.status_code)
132+
133+
134+
@pytest.mark.perf_profile
@pytest.mark.parametrize("field, expected_status_code, expected_status", mandatory_fields)
def test_create_performance_profiles_mandatory_fields(cluster_type, field, expected_status_code, expected_status):
    """
    Test Description: This test Validates error response of createPerformanceProfile API when mandatory fields are missing.

    For each parametrized field the corresponding key is removed from a copy of
    the reference profile JSON and the mutated document is submitted; the API
    must reject it with the expected status code/status and a message naming
    the missing field. Some fields are reported by the API under a camelCase
    name, hence the `field = ...` reassignments below.
    """
    # Form the kruize url
    form_kruize_url(cluster_type)
    input_json_file = perf_profile_dir / 'resource_optimization_openshift.json'

    # Delete any existing profile so a create failure can only come from the missing field.
    response = delete_performance_profile(input_json_file)
    print("delete API status code = ", response.status_code)
    data = response.json()
    print("delete API status message = ", data["message"])

    # Scratch file for the mutated profile JSON.
    json_file = "/tmp/create_performance_profile.json"
    # fix: close the input file handle (was `json.load(open(input_json_file))`,
    # which leaked the descriptor).
    with open(input_json_file) as f:
        json_data = json.load(f)

    if field == "name":
        json_data.pop("name", None)
    elif field == "profile_version":
        json_data.pop("profile_version", None)
    elif field == "sloInfo":
        json_data.pop("slo", None)
    elif field == "direction":
        json_data['slo'].pop("direction", None)
    elif field == "objective_function":
        json_data['slo'].pop("objective_function", None)
        field = "objectiveFunction"  # API error message uses the camelCase name
    elif field == "function_type":
        json_data['slo']['objective_function'].pop("function_type", None)
    elif field == "function_variables":
        json_data['slo'].pop("function_variables", None)
        field = "functionVariables"
    elif field == "metric_name":
        json_data['slo']['function_variables'][0].pop("name", None)
        field = "name"
    elif field == "datasource":
        json_data['slo']['function_variables'][0].pop("datasource", None)
    elif field == "value_type":
        json_data['slo']['function_variables'][0].pop("value_type", None)
        field = "valueType"
    elif field == "aggregation_functions":
        json_data['slo']['function_variables'][0].pop("aggregation_functions", None)
    elif field == "function":
        json_data['slo']['function_variables'][0]['aggregation_functions'][0].pop("function", None)
    elif field == "query":
        json_data['slo']['function_variables'][0]['aggregation_functions'][0].pop("query", None)

    print("\n*****************************************")
    print(json_data)
    print("*****************************************\n")
    # fix: serialize directly with json.dump instead of dumps() + write().
    with open(json_file, 'w') as file:
        json.dump(json_data, file)

    # Create performance profile using the mutated json
    response = create_performance_profile(json_file)
    data = response.json()
    print(data['message'])

    assert response.status_code == expected_status_code, \
        f"Mandatory field check failed for {field} actual - {response.status_code} expected - {expected_status_code}"
    assert data['status'] == expected_status

    # aggregation_functions has its own dedicated error message; every other
    # field is reported through the generic missing-parameter template.
    if field == "aggregation_functions":
        assert data['message'] == AGGR_FUNC_MISSING_MANDATORY_PARAMETERS_MSG
    else:
        assert data['message'] == CREATE_METRIC_PROFILE_MISSING_MANDATORY_PARAMETERS_MSG % field

0 commit comments

Comments
 (0)