diff --git a/scripts/release_issue_status/auto_close.py b/scripts/release_issue_status/auto_close.py
new file mode 100644
index 000000000000..ca439398aa59
--- /dev/null
+++ b/scripts/release_issue_status/auto_close.py
@@ -0,0 +1,36 @@
+import datetime
+
+import requests
+from bs4 import BeautifulSoup
+
+
+def auto_close_issue(sdk_repo, item):
+ issue_number, package_name = item.issue_object.number, item.package
+ issue_info = sdk_repo.get_issue(number=issue_number)
+ issue_author = issue_info.user.login
+ last_comment = list(issue_info.get_comments())[-1]
+ last_comment_date = last_comment.created_at
+ last_version, last_time = get_last_released_date(package_name)
+ if last_time and last_time > last_comment_date:
+ comment = f'Hi @{issue_author}, pypi link: https://pypi.org/project/{package_name}/{last_version}/'
+ issue_info.create_comment(body=comment)
+ issue_info.edit(state='closed')
+ item.labels.append('auto-closed')
+ item.issue_object.set_labels(*item.labels)
+ print(f"issue number:{issue_number} has been closed!")
+
+
+def get_last_released_date(package_name):
+ pypi_link = f'https://pypi.org/project/{package_name}/#history'
+ res = requests.get(pypi_link)
+ soup = BeautifulSoup(res.text, 'html.parser')
+    # find the latest release entry (top div) in the PyPI release-timeline section
+ try:
+ package_info = soup.select('div[class="release-timeline"]')[0].find_all('div')[0]
+ last_version_mix = package_info.find_all('p', class_="release__version")[0].contents[0]
+ except IndexError as e:
+ return '', ''
+ last_version = last_version_mix.replace(' ', '').replace('\n', '')
+ last_version_date_str = package_info.time.attrs['datetime'].split('+')[0]
+ last_version_date = datetime.datetime.strptime(last_version_date_str, '%Y-%m-%dT%H:%M:%S')
+ return last_version, last_version_date
diff --git a/scripts/release_issue_status/auto_pipeline_run.py b/scripts/release_issue_status/auto_pipeline_run.py
new file mode 100644
index 000000000000..ef35ee2612fe
--- /dev/null
+++ b/scripts/release_issue_status/auto_pipeline_run.py
@@ -0,0 +1,47 @@
+import json
+import os
+import re
+from msrest.authentication import BasicAuthentication
+from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
+from azure.devops.v6_0.pipelines import models
+import requests
+
+
+def run_pipeline(issue_link, sdk_issue_object, pipeline_url):
+ paramaters = {
+ "stages_to_skip": [],
+ "resources": {
+ "repositories": {
+ "self": {
+ "refName": "refs/heads/main"
+ }
+ }
+ },
+ "variables": {
+ "BASE_BRANCH": {
+ "value": f"{sdk_issue_object.head.label}",
+ "isSecret": False
+ },
+ "ISSUE_LINK": {
+ "value": f"{issue_link}",
+ "isSecret": False
+ },
+ "PIPELINE_LINK": {
+ "value": f"{pipeline_url}",
+ "isSecret": False
+ }
+ }
+ }
+ # Fill in with your personal access token and org URL
+ personal_access_token = os.getenv('PIPELINE_TOKEN')
+ organization_url = 'https://dev.azure.com/azure-sdk'
+
+ # Create a connection to the org
+ credentials = BasicAuthentication('', personal_access_token)
+ run_parameters = models.RunPipelineParameters(**paramaters)
+ client = PipelinesClient(base_url=organization_url, creds=credentials)
+ result = client.run_pipeline(project='internal',pipeline_id=2500,run_parameters=run_parameters)
+ if result.state == 'inProgress':
+ return True
+ else:
+ return False
diff --git a/scripts/release_issue_status/get_python_pipeline.py b/scripts/release_issue_status/get_python_pipeline.py
new file mode 100644
index 000000000000..5b0939a704d2
--- /dev/null
+++ b/scripts/release_issue_status/get_python_pipeline.py
@@ -0,0 +1,27 @@
+import os
+import re
+
+from msrest.authentication import BasicAuthentication
+from azure.devops.v6_0.pipelines.pipelines_client import PipelinesClient
+
+
+def get_python_pipelines():
+ python_piplines = {}
+ pipeline_client = PipelinesClient(base_url='https://dev.azure.com/azure-sdk',
+ creds=BasicAuthentication('', os.getenv('PIPELINE_TOKEN')))
+ pipelines = pipeline_client.list_pipelines(project='internal')
+ for pipeline in pipelines:
+ if re.findall('^python - \w*$', pipeline.name):
+ key = pipeline.name.replace('python - ', '')
+ python_piplines[key] = pipeline.id
+ return python_piplines
+
+
+def get_pipeline_url(python_piplines, output_folder):
+ definitionId = python_piplines.get(output_folder)
+ if definitionId:
+ pipeline_url = 'https://dev.azure.com/azure-sdk/internal/_build?definitionId={}'.format(definitionId)
+ else:
+ print('Cannot find definitionId, Do not display pipeline_url')
+ pipeline_url = ''
+ return pipeline_url
diff --git a/scripts/release_issue_status/main.py b/scripts/release_issue_status/main.py
index 6bdf38279c4f..64c5d1ac4bd5 100644
--- a/scripts/release_issue_status/main.py
+++ b/scripts/release_issue_status/main.py
@@ -1,13 +1,18 @@
import time
import os
import re
-from github import Github
from datetime import date, datetime
import subprocess as sp
+import traceback
+
+from github import Github
from azure.storage.blob import BlobClient
+
import reply_generator as rg
-from update_issue_body import update_issue_body, find_readme_link
-import traceback
+from update_issue_body import update_issue_body, find_readme_and_output_folder
+from auto_close import auto_close_issue
+from get_python_pipeline import get_python_pipelines, get_pipeline_url
+
_NULL = ' '
_FILE_OUT = 'release_issue_status.csv'
@@ -121,11 +126,16 @@ def _latest_comment_time(comments, delay_from_create_date):
return delay_from_create_date if not q else int((time.time() - q[-1][0]) / 3600 / 24)
-def auto_reply(item, sdk_repo, rest_repo, duplicated_issue):
+def auto_reply(item, request_repo, rest_repo, sdk_repo, duplicated_issue, python_piplines):
print("==========new issue number: {}".format(item.issue_object.number))
+ if 'Configured' in item.labels:
+ item.labels.remove('Configured')
+
if 'auto-link' not in item.labels:
+ item.labels.append('auto-link')
+ item.issue_object.set_labels(*item.labels)
try:
- package_name, readme_link = update_issue_body(sdk_repo, rest_repo, item.issue_object.number)
+ package_name, readme_link, output_folder = update_issue_body(request_repo, rest_repo, item.issue_object.number)
print("pkname, readme", package_name, readme_link)
item.package = package_name
key = ('Python', item.package)
@@ -133,20 +143,24 @@ def auto_reply(item, sdk_repo, rest_repo, duplicated_issue):
except Exception as e:
item.bot_advice = 'failed to modify the body of the new issue. Please modify manually'
item.labels.append('attention')
+ item.issue_object.set_labels(*item.labels)
print(e)
raise
- item.labels.append('auto-link')
- item.issue_object.set_labels(*item.labels)
else:
try:
- readme_link = find_readme_link(sdk_repo, item.issue_object.number)
+ readme_link, output_folder = find_readme_and_output_folder(request_repo, rest_repo, item.issue_object.number)
except Exception as e:
print('Issue: {} updates body failed'.format(item.issue_object.number))
item.bot_advice = 'failed to find Readme link, Please check !!'
item.labels.append('attention')
+ item.issue_object.set_labels(*item.labels)
raise
try:
- reply = rg.begin_reply_generate(item=item, rest_repo=rest_repo, readme_link=readme_link)
+ print("*********************")
+ print(python_piplines)
+ pipeline_url = get_pipeline_url(python_piplines, output_folder)
+ rg.begin_reply_generate(item=item, rest_repo=rest_repo, readme_link=readme_link,
+ sdk_repo=sdk_repo, pipeline_url=pipeline_url)
except Exception as e:
item.bot_advice = 'auto reply failed, Please intervene manually !!'
print('Error from auto reply ========================')
@@ -158,14 +172,18 @@ def auto_reply(item, sdk_repo, rest_repo, duplicated_issue):
def main():
# get latest issue status
g = Github(os.getenv('TOKEN')) # please fill user_token
- sdk_repo = g.get_repo('Azure/sdk-release-request')
- rest_repo = g.get_repo('Azure/azure-rest-api-specs')
- label1 = sdk_repo.get_label('ManagementPlane')
- open_issues = sdk_repo.get_issues(state='open', labels=[label1])
+ request_repo = g.get_repo('Azure/sdk-release-request')
+ rest_repo = g.get_repo('Azure/azure-rest-api-specs')
+ sdk_repo = g.get_repo('Azure/azure-sdk-for-python')
+ label1 = request_repo.get_label('ManagementPlane')
+ open_issues = request_repo.get_issues(state='open', labels=[label1])
issue_status = []
issue_status_python = []
duplicated_issue = dict()
start_time = time.time()
+    # get pipeline definition IDs (pipeline name -> id) once, up front
+ python_piplines = get_python_pipelines()
+
for item in open_issues:
if not item.number:
continue
@@ -204,16 +222,22 @@ def main():
for item in issue_status:
if item.status == 'release':
item.bot_advice = 'better to release asap.'
- elif item.comment_num == 0 and 'Python' in item.labels:
+ elif (item.comment_num == 0 or 'Configured' in item.labels) and 'Python' in item.labels:
item.bot_advice = 'new issue and better to confirm quickly.'
try:
- auto_reply(item, sdk_repo, rest_repo, duplicated_issue)
+ auto_reply(item, request_repo, rest_repo, sdk_repo, duplicated_issue, python_piplines)
except Exception as e:
continue
elif not item.author_latest_comment in _PYTHON_SDK_ADMINISTRATORS:
item.bot_advice = 'new comment for author.'
elif item.delay_from_latest_update >= 7:
item.bot_advice = 'delay for a long time and better to handle now.'
+ if item.comment_num > 1 and item.language == 'Python':
+ try:
+ auto_close_issue(request_repo, item)
+ except Exception as e:
+ item.bot_advice = 'auto-close failed, please check!'
+ print(f"=====issue: {item.issue_object.number}, {e}")
if item.days_from_latest_commit >= 30 and item.language == 'Python' and '30days attention' not in item.labels:
item.labels.append('30days attention')
@@ -243,10 +267,10 @@ def main():
print_check('git push -f origin HEAD')
# upload to storage account(it is created in advance)
- blob = BlobClient.from_connection_string(conn_str=os.getenv('CONN_STR'), container_name=os.getenv('FILE'),
- blob_name=_FILE_OUT)
- with open(_FILE_OUT, 'rb') as data:
- blob.upload_blob(data, overwrite=True)
+# blob = BlobClient.from_connection_string(conn_str=os.getenv('CONN_STR'), container_name=os.getenv('FILE'),
+# blob_name=_FILE_OUT)
+# with open(_FILE_OUT, 'rb') as data:
+# blob.upload_blob(data, overwrite=True)
if __name__ == '__main__':
diff --git a/scripts/release_issue_status/release_issue_status.yml b/scripts/release_issue_status/release_issue_status.yml
index 3d4fd02b66ad..0e3146fdee9e 100644
--- a/scripts/release_issue_status/release_issue_status.yml
+++ b/scripts/release_issue_status/release_issue_status.yml
@@ -36,6 +36,9 @@ jobs:
export CONN_STR=$(ENV_CONN_STR)
export FILE=$(ENV_FILE)
export TOKEN=$(USR_TOKEN)
+ export HEADERS=$(PIPELINE_HEADERS)
+ export URL=$(PIPELINE_URL)
+          export PIPELINE_TOKEN=$(PIPELINE_TOKEN)
# create virtual env
python -m venv venv-sdk
diff --git a/scripts/release_issue_status/reply_generator.py b/scripts/release_issue_status/reply_generator.py
index dbe6494bd7d1..eddf8a9fee02 100644
--- a/scripts/release_issue_status/reply_generator.py
+++ b/scripts/release_issue_status/reply_generator.py
@@ -1,3 +1,4 @@
+import auto_pipeline_run as apr
import re
issue_object_rg = None
@@ -7,11 +8,10 @@ def weather_change_readme(rest_repo, link_dict, labels):
# to see whether need change readme
contents = str(rest_repo.get_contents(link_dict['readme_path']).decoded_content)
pattern_tag = re.compile(r'tag: package-[\w+-.]+')
- package_tag = pattern_tag.search(contents).group()
- package_tag = package_tag.split(':')[1].strip()
+ package_tag = pattern_tag.findall(contents)
readme_python_contents = str(rest_repo.get_contents(link_dict['readme_python_path']).decoded_content)
whether_multi_api = 'multi-api' in readme_python_contents
- whether_same_tag = package_tag == link_dict['readme_tag']
+ whether_same_tag = link_dict['readme_tag'] in package_tag
whether_change_readme = not whether_same_tag or whether_multi_api and not 'MultiAPI' in labels
return whether_change_readme
@@ -28,7 +28,7 @@ def get_links(readme_link):
resource_manager = pattern_resource_manager.search(readme_link).group()
link_dict['readme_path'] = readme_path
link_dict['readme_python_path'] = readme_path[:readme_path.rfind('/')] + '/readme.python.md'
- link_dict['readme_tag'] = readme_tag
+ link_dict['readme_tag'] = 'tag: ' + readme_tag
link_dict['resource_manager'] = resource_manager
return link_dict
@@ -76,13 +76,16 @@ def swagger_generator_parse(context, latest_pr_number):
python_track2_info = re.search(pattern_python_track2, python).group()
track2_info_model = ' python-track2
{} '.format(
python_track2_info)
+ pattern_sdk_changes = re.compile('/azure-sdk-for-python/pull/\d*">Release SDK Changes', re.DOTALL)
+ sdk_link = re.search(pattern_sdk_changes, python_track2_info).group()
+ sdk_link_number = re.search(re.compile('[0-9]+'), sdk_link).group()
info_model = 'hi @{} Please check the package whether works well and the changelog info is as below:\n' \
'{}\n{}\n' \
'\n* (The version of the package is only a temporary version for testing)\n' \
'\nhttps://github.com/Azure/azure-rest-api-specs/pull/{}\n' \
.format(issue_object_rg.user.login, track1_info_model, track2_info_model, str(latest_pr_number))
- return info_model
+ return info_model, sdk_link_number
def reply_owner(reply_content):
@@ -95,7 +98,7 @@ def add_label(label_name, labels):
issue_object_rg.set_labels(*labels)
-def begin_reply_generate(item, rest_repo, readme_link):
+def begin_reply_generate(item, rest_repo, readme_link, sdk_repo, pipeline_url):
global issue_object_rg
issue_object_rg = item.issue_object
link_dict = get_links(readme_link)
@@ -103,8 +106,16 @@ def begin_reply_generate(item, rest_repo, readme_link):
whether_change_readme = weather_change_readme(rest_repo, link_dict, labels)
if not whether_change_readme:
- latest_pr_number = get_latest_pr_from_readme(rest_repo,link_dict)
- reply_content = latest_pr_parse(rest_repo, latest_pr_number)
+ latest_pr_number = get_latest_pr_from_readme(rest_repo, link_dict)
+ reply_content, sdk_link_number = latest_pr_parse(rest_repo, latest_pr_number)
+ run_pipeline = apr.run_pipeline(issue_link=issue_object_rg.html_url,
+ sdk_issue_object=sdk_repo.get_pull(int(sdk_link_number)),
+ pipeline_url=pipeline_url
+ )
+ if run_pipeline:
+ print(f'{issue_object_rg.number} run pipeline successfully')
+ else:
+ print(f'{issue_object_rg.number} run pipeline fail')
reply_owner(reply_content)
add_label('auto-ask-check', labels)
else:
diff --git a/scripts/release_issue_status/requirement.txt b/scripts/release_issue_status/requirement.txt
index 13781556dd42..512d9702666a 100644
--- a/scripts/release_issue_status/requirement.txt
+++ b/scripts/release_issue_status/requirement.txt
@@ -1,3 +1,7 @@
PyGithub
datetime
-azure.storage.blob==12.8.1
\ No newline at end of file
+requests
+bs4
+azure.storage.blob==12.8.1
+azure-devops
+msrest
diff --git a/scripts/release_issue_status/update_issue_body.py b/scripts/release_issue_status/update_issue_body.py
index 718bc906fc80..b96a25daec92 100644
--- a/scripts/release_issue_status/update_issue_body.py
+++ b/scripts/release_issue_status/update_issue_body.py
@@ -20,7 +20,7 @@ def update_issue_body(sdk_repo, rest_repo, issue_number):
link = link.split(']')[0]
link = link.replace('[', "").replace(']', "").replace('(', "").replace(')', "")
- package_name, readme_link = get_pkname_and_readme_link(rest_repo, link)
+ package_name, readme_link, output_folder = get_pkname_and_readme_link(rest_repo, link)
# Check readme tag format
if 'package' not in readme_tag:
@@ -36,7 +36,7 @@ def update_issue_body(sdk_repo, rest_repo, issue_number):
issue_body_up += raw + '\n'
issue_info.edit(body=issue_body_up)
- return package_name, readme_link
+ return package_name, readme_link, output_folder
def get_pkname_and_readme_link(rest_repo, link):
@@ -46,6 +46,7 @@ def get_pkname_and_readme_link(rest_repo, link):
commit_sha = link.split('commit/')[-1]
commit = rest_repo.get_commit(commit_sha)
link = commit.files[0].blob_url
+ link = re.sub('blob/(.*?)/specification', 'blob/main/specification', link)
# if link is a pr, it can get both pakeage name and readme link.
if 'pull' in link:
@@ -79,18 +80,26 @@ def get_pkname_and_readme_link(rest_repo, link):
readme_link_part = '/specification' + readme_link.split('/specification')[-1]
readme_contents = str(rest_repo.get_contents(readme_link_part).decoded_content)
pk_name = re.findall(r'package-name: (.*?)\\n', readme_contents)[0]
+ out_folder = re.findall(r'\$\(python-sdks-folder\)/(.*?)/azure-', readme_contents)[0]
readme_link = readme_link.replace('python.', '')
- return pk_name, readme_link
+ return pk_name, readme_link, out_folder
-def find_readme_link(sdk_repo, issue_number):
+def find_readme_and_output_folder(sdk_repo, rest_repo, issue_number):
# Get Issue Number
issue_info = sdk_repo.get_issue(number=issue_number)
issue_body = issue_info.body
issue_body_list = issue_body.split("\n")
for row in issue_body_list:
if 'resource-manager' in row:
- readme_link = row + '/readme.md'
- return readme_link
+ readme_link = '{}/readme.md'.format(row.strip("\r"))
+ # Get output folder from readme.python.md
+ readme_python_link = readme_link.split('/resource-manager')[0] + '/resource-manager/readme.python.md'
+ readme_python_link_part = '/specification' + readme_python_link.split('/specification')[-1]
+ readme_contents = str(rest_repo.get_contents(readme_python_link_part).decoded_content)
+ output_folder = re.findall(r'\$\(python-sdks-folder\)/(.*?)/azure-', readme_contents)[0]
+
+ return readme_link, output_folder
raise Exception('Not find readme link,please check')
+