Digital.ai Release Templates are easy to design and allow multiple tools to integrate into organized processes/flows. As our templates grow, so do the inputs, number of tasks, & phases. Currently, my team uses pytest and the Digital.ai REST API to gather facts about our templates and validate that they can be used in an expected manner and that they can accept our defined input.

In order to pytest a template, a large number of REST GET requests have to be made to gather facts such as Folder IDs, Task IDs, & Release IDs. These preemptive tasks are done so that a final REST call can be made to start assertion testing.

The idea I am proposing is for Digital.ai Release templates to offer pre-generated testing functions, since the UI already has access to all the IDs required to gather facts about a template. These pre-generated tests would of course be boilerplate, but they would quickly jumpstart all the foundational setup one needs to do before writing assertion tests.

Below is a curated example of the work I have to do in order to test that a phase exists the way I expect and need it to exist:
```Python
import requests
import pytest
import yaml

# ==================Standard fixtures for requests==================
@pytest.fixture
def url_fixture():
    '''
    Load config.yml and return the base URL of the Release server.
    '''
    with open('config.yml', mode='r', encoding='utf-8') as cfg_file:
        return yaml.safe_load(cfg_file)['url']


@pytest.fixture
def auth_fixture():
    '''
    Load config.yml and return a (username, password) tuple for HTTP basic auth.
    '''
    with open('config.yml', mode='r', encoding='utf-8') as cfg_file:
        credentials = yaml.safe_load(cfg_file)['auth']
    return (credentials['username'], credentials['password'])


# ==================Fixtures for getting folder IDs==================
@pytest.fixture
def could_patterns_fixture(url_fixture, auth_fixture):
    '''
    Return the folder ID of the 'Cloud Patterns' folder.

    NOTE(review): the fixture name says "could" but the folder is
    "Cloud Patterns"; kept as-is so dependent fixtures/tests still resolve.
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/folders/find',
        params={'byPath': 'Cloud Patterns'},
        auth=auth_fixture,
        verify=False,  # NOTE(review): disables TLS verification -- test env only
        timeout=30,    # was None: a None timeout can hang the whole suite forever
    )
    # Fail fast with a clear HTTP error instead of a KeyError on a bad body.
    response.raise_for_status()
    return response.json()['id']


# ==================Fixtures for getting Pattern 1 Specific IDs & Paths==================
@pytest.fixture
def pattern_1_full_id_fixture(url_fixture, auth_fixture, could_patterns_fixture):
    '''
    Return the full ID of the 'Pattern 1' release/template that lives
    under the Cloud Patterns folder.
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/releases/byTitle',
        params={'releaseTitle': 'Pattern 1'},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: never wait indefinitely on a dead server
    )
    response.raise_for_status()
    # First match whose ID is under the Cloud Patterns folder; next() with a
    # fallback gives a readable failure instead of a bare IndexError on [0].
    match = next(
        (item['id'] for item in response.json()
         if could_patterns_fixture in item['id']),
        None,
    )
    if match is None:
        pytest.fail("'Pattern 1' not found under the Cloud Patterns folder")
    return match


# ==================Start of pytest tests==================
@pytest.mark.smoke
@pytest.mark.smoke_pattern_1
@pytest.mark.dependency()
def test_validate_pattern_1(url_fixture, auth_fixture, could_patterns_fixture):
    '''
    This test checks that the 'Pattern 1' template exists in the
    Cloud Patterns folder.
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/releases/byTitle',
        params={'releaseTitle': 'Pattern 1'},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: avoid hanging the suite
    )
    # Validate the transport before parsing/filtering the body.
    assert response.status_code == 200
    payload = response.json()
    assert payload != []
    data = [
        item for item in payload
        if could_patterns_fixture in item['id'] and item['status'] == 'TEMPLATE'
    ]
    # Guard before indexing so a miss is a readable assertion, not IndexError.
    assert data, "'Pattern 1' TEMPLATE not found in the Cloud Patterns folder"
    assert data[0]['title'] == 'Pattern 1'
    assert data[0]['status'] == 'TEMPLATE'
    # A full template ID has three '/'-separated segments.
    assert len(data[0]['id'].split('/')) == 3


@pytest.mark.smoke
@pytest.mark.smoke_pattern_1
@pytest.mark.dependency(depends=['test_validate_pattern_1'])
def test_validate_pattern_1_jira_phase(url_fixture, auth_fixture, pattern_1_full_id_fixture):
    '''
    This test will check if the 'Check Jira' phase exists
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/phases/byTitle',
        params={'phaseTitle': 'Check Jira', 'releaseId': pattern_1_full_id_fixture},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: avoid hanging the suite
    )
    # Bug fix: the original indexed response.json()[0] BEFORE asserting the
    # list was non-empty, so an empty result raised IndexError instead of a
    # readable assertion failure. Validate first, then index.
    assert response.status_code == 200
    payload = response.json()
    assert payload != [], "phase 'Check Jira' not found"
    data = payload[0]
    assert data['title'] == 'Check Jira'
    assert data['type'] == 'xlrelease.Phase'
    assert pattern_1_full_id_fixture in data['id']


@pytest.mark.smoke
@pytest.mark.smoke_pattern_1
@pytest.mark.dependency(depends=['test_validate_pattern_1'])
def test_validate_pattern_1_variable_phase(url_fixture, auth_fixture, pattern_1_full_id_fixture):
    '''
    This test will check if the 'Release Variable Setup' phase exists
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/phases/byTitle',
        params={'phaseTitle': 'Release Variable Setup', 'releaseId': pattern_1_full_id_fixture},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: avoid hanging the suite
    )
    # Bug fix: validate status and non-emptiness BEFORE indexing [0];
    # the original guard ran after the index and could never fire.
    assert response.status_code == 200
    payload = response.json()
    assert payload != [], "phase 'Release Variable Setup' not found"
    data = payload[0]
    assert data['title'] == 'Release Variable Setup'
    assert data['type'] == 'xlrelease.Phase'
    assert pattern_1_full_id_fixture in data['id']


@pytest.mark.smoke
@pytest.mark.smoke_pattern_1
@pytest.mark.dependency(depends=['test_validate_pattern_1'])
def test_validate_pattern_1_xld_phase(url_fixture, auth_fixture, pattern_1_full_id_fixture):
    '''
    This test will check if the 'D.ai Deploy Configuration' phase exists
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/phases/byTitle',
        params={'phaseTitle': 'D.ai Deploy Configuration', 'releaseId': pattern_1_full_id_fixture},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: avoid hanging the suite
    )
    # Bug fix: validate status and non-emptiness BEFORE indexing [0];
    # the original guard ran after the index and could never fire.
    assert response.status_code == 200
    payload = response.json()
    assert payload != [], "phase 'D.ai Deploy Configuration' not found"
    data = payload[0]
    assert data['title'] == 'D.ai Deploy Configuration'
    assert data['type'] == 'xlrelease.Phase'
    assert pattern_1_full_id_fixture in data['id']


@pytest.mark.smoke
@pytest.mark.smoke_pattern_1
@pytest.mark.dependency(depends=['test_validate_pattern_1'])
def test_validate_pattern_1_deploy_phase(url_fixture, auth_fixture, pattern_1_full_id_fixture):
    '''
    This test will check if the 'Deploy Specs' phase exists
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/phases/byTitle',
        params={'phaseTitle': 'Deploy Specs', 'releaseId': pattern_1_full_id_fixture},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: avoid hanging the suite
    )
    # Bug fix: validate status and non-emptiness BEFORE indexing [0];
    # the original guard ran after the index and could never fire.
    assert response.status_code == 200
    payload = response.json()
    assert payload != [], "phase 'Deploy Specs' not found"
    data = payload[0]
    assert data['title'] == 'Deploy Specs'
    assert data['type'] == 'xlrelease.Phase'
    assert pattern_1_full_id_fixture in data['id']


@pytest.mark.smoke
@pytest.mark.smoke_pattern_1
@pytest.mark.dependency(depends=['test_validate_pattern_1'])
def test_validate_pattern_1_output_phase(url_fixture, auth_fixture, pattern_1_full_id_fixture):
    '''
    This test will check if the 'Process Output Report' phase exists
    '''
    response = requests.get(
        f'{url_fixture}/api/v1/phases/byTitle',
        params={'phaseTitle': 'Process Output Report', 'releaseId': pattern_1_full_id_fixture},
        auth=auth_fixture,
        verify=False,  # NOTE(review): test env only
        timeout=30,    # was None: avoid hanging the suite
    )
    # Bug fix: validate status and non-emptiness BEFORE indexing [0];
    # the original guard ran after the index and could never fire.
    assert response.status_code == 200
    payload = response.json()
    assert payload != [], "phase 'Process Output Report' not found"
    data = payload[0]
    assert data['title'] == 'Process Output Report'
    assert data['type'] == 'xlrelease.Phase'
    assert pattern_1_full_id_fixture in data['id']
```

Comments

  • In response to your request for simplifying the testing and integration of Digital.ai Release Templates with pytest, we recommend representing your templates as YAML code and storing them in Git version control system with Folder Versioning feature. By using YAML files, you can create pytest test cases that parse these files to validate whether the templates align with your requirements. This approach eliminates the need for extensive REST API calls, streamlining the testing process. Furthermore, it enhances collaboration and version control for your release templates, making it easier to track changes and work as a team.