Amirouche 2024-02-02 14:40:02 +01:00
parent 09b391ee02
commit ba8a875394
5 changed files with 21 additions and 21 deletions

@@ -10,7 +10,7 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
         super(JIRACloudIntegrationIssue, self).__init__("JIRA", token)
 
     async def create_new_assignment(self, integration_project_id, title, description, assignee, issue_type):
-        await self._client.set_jira_project_id(integration_project_id)
+        self._client.set_jira_project_id(integration_project_id)
         data = {
             'summary': title,
             'description': description,
@@ -29,28 +29,28 @@ class JIRACloudIntegrationIssue(BaseIntegrationIssue):
         results = []
         for integration_project_id in projects_map:
-            await self._client.set_jira_project_id(integration_project_id)
+            self._client.set_jira_project_id(integration_project_id)
             jql = 'labels = OpenReplay'
             if len(projects_map[integration_project_id]) > 0:
                 jql += f" AND ID IN ({','.join(projects_map[integration_project_id])})"
-            issues = await self._client.get_issues(jql, offset=0)
+            issues = self._client.get_issues(jql, offset=0)
            results += issues
        return {"issues": results}
 
    async def get(self, integration_project_id, assignment_id):
-        await self._client.set_jira_project_id(integration_project_id)
-        return await self._client.get_issue_v3(assignment_id)
+        self._client.set_jira_project_id(integration_project_id)
+        return self._client.get_issue_v3(assignment_id)
 
    async def comment(self, integration_project_id, assignment_id, comment):
-        await self._client.set_jira_project_id(integration_project_id)
-        return await self._client.add_comment_v3(assignment_id, comment)
+        self._client.set_jira_project_id(integration_project_id)
+        return self._client.add_comment_v3(assignment_id, comment)
 
    async def get_metas(self, integration_project_id):
        meta = {}
-        await self._client.set_jira_project_id(integration_project_id)
-        meta['issueTypes'] = await self._client.get_issue_types()
-        meta['users'] = await self._client.get_assignable_users()
+        self._client.set_jira_project_id(integration_project_id)
+        meta['issueTypes'] = self._client.get_issue_types()
+        meta['users'] = self._client.get_assignable_users()
        return {"provider": self.provider.lower(), **meta}
 
    async def get_projects(self):
-        return await self._client.get_projects()
+        return self._client.get_projects()
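
Dropping await on these calls implies the underlying Jira client now exposes plain synchronous methods while the integration wrappers above stay async. A minimal sketch of what such a synchronous client method could look like, assuming a requests-based client; the names JiraClient, _base_url and _session are hypothetical and do not appear in this diff:

import requests

class JiraClient:
    # Hypothetical synchronous client, sketch only; not the code touched by this commit.
    def __init__(self, base_url, token):
        self._base_url = base_url
        self._session = requests.Session()
        self._session.headers["Authorization"] = f"Bearer {token}"

    def get_projects(self):
        # Blocking HTTP call; when invoked from the async wrappers it runs on the
        # event loop thread unless it is offloaded to a worker thread.
        response = self._session.get(f"{self._base_url}/rest/api/3/project")
        response.raise_for_status()
        return response.json()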

@@ -32,9 +32,9 @@ async def get_ux_webcam_signed_url(session_id, project_id, check_existence: bool
     results = []
     bucket_name = "uxtesting-records"  # config("sessions_bucket")
     k = f'{session_id}/ux_webcam_record.webm'
-    if check_existence and not StorageClient.exists(bucket=bucket_name, key=k):
+    if check_existence and not await StorageClient.exists(bucket=bucket_name, key=k):
         return []
-    results.append(StorageClient.get_presigned_url_for_sharing(
+    results.append(await StorageClient.get_presigned_url_for_sharing(
         bucket=bucket_name,
         expires_in=100000,
         key=k
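
Because exists and get_presigned_url_for_sharing are now awaited, this helper only works if both are coroutine functions (see the storage client changes below). A hedged usage sketch, assuming the helper keeps its async def signature; the session and project values are placeholders:

import asyncio

async def demo():
    # Placeholder identifiers; real callers pass actual session/project ids.
    urls = await get_ux_webcam_signed_url(session_id=123, project_id=1, check_existence=True)
    print(urls)

asyncio.run(demo())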

@@ -25,7 +25,7 @@ class AmazonS3Storage(ObjectStorage):
                                      region_name=config("sessions_region"),
                                      verify=not config("S3_DISABLE_SSL_VERIFY", default=False, cast=bool))
 
-    def exists(self, bucket, key):
+    async def exists(self, bucket, key):
         try:
             self.resource.Object(bucket, key).load()
         except botocore.exceptions.ClientError as e:
@@ -92,7 +92,7 @@ class AmazonS3Storage(ObjectStorage):
             raise ex
         return result["Body"].read().decode()
 
-    def tag_for_deletion(self, bucket, key):
+    async def tag_for_deletion(self, bucket, key):
         if not self.exists(bucket, key):
             return False
         # Copy the file to change the creation date, so it can be deleted X days after the tag's creation
@@ -103,10 +103,10 @@ class AmazonS3Storage(ObjectStorage):
             TaggingDirective='COPY'
         )
-        self.tag_file(bucket=bucket, file_key=key, tag_key='to_delete_in_days',
+        await self.tag_file(bucket=bucket, file_key=key, tag_key='to_delete_in_days',
                       tag_value=config("SCH_DELETE_DAYS", default='7'))
 
-    def tag_file(self, file_key, bucket, tag_key, tag_value):
+    async def tag_file(self, file_key, bucket, tag_key, tag_value):
         return self.client.put_object_tagging(
             Bucket=bucket,
             Key=file_key,
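
Adding async def to these methods makes them awaitable, but the boto3 calls inside them still block, and tag_for_deletion still calls self.exists without awaiting it. One way to keep the same interface while moving the blocking work off the event loop, as a sketch only (assumes Python 3.9+ for asyncio.to_thread; this is not what the commit itself does):

import asyncio

import boto3
import botocore.exceptions

class AsyncS3Sketch:
    # Sketch: offload blocking boto3 calls to a thread so async callers can await them.
    def __init__(self):
        self.resource = boto3.resource("s3")

    async def exists(self, bucket, key):
        def _head():
            try:
                self.resource.Object(bucket, key).load()
                return True
            except botocore.exceptions.ClientError as e:
                if e.response["Error"]["Code"] == "404":
                    return False
                raise
        return await asyncio.to_thread(_head)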

@@ -4,13 +4,13 @@ from chalicelib.utils.storage import StorageClient
 
 def tag_session(file_key, tag_key='retention', tag_value='vault'):
     bucket = config("sessions_bucket")
-    if not StorageClient.exists(bucket=bucket, key=file_key):
+    if not await StorageClient.exists(bucket=bucket, key=file_key):
         return None
-    return StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)
+    return await StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)
 
 
 def tag_record(file_key, tag_key='retention', tag_value='vault'):
     bucket = config('ASSIST_RECORDS_BUCKET')
-    if not StorageClient.exists(bucket=bucket, key=file_key):
+    if not await StorageClient.exists(bucket=bucket, key=file_key):
         return None
-    return StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)
+    return await StorageClient.tag_file(file_key=file_key, bucket=bucket, tag_key=tag_key, tag_value=tag_value)
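
These helpers now await the storage client but, as shown, are still declared with a plain def, and await outside an async function is a syntax error; presumably they are meant to become coroutines as well. A minimal sketch of the async form of tag_session, assuming config and StorageClient behave as elsewhere in this diff:

async def tag_session(file_key, tag_key='retention', tag_value='vault'):
    bucket = config("sessions_bucket")
    # Both storage calls are awaited, matching the async StorageClient methods above.
    if not await StorageClient.exists(bucket=bucket, key=file_key):
        return None
    return await StorageClient.tag_file(file_key=file_key, bucket=bucket,
                                        tag_key=tag_key, tag_value=tag_value)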