wip

This commit is contained in:
parent 0366a2593a
commit 1bfe2e153a

16 changed files with 34 additions and 34 deletions

@@ -1,4 +1,4 @@
-.import schemas
+import schemas
 from chalicelib.core import countries, events, metadata
 from chalicelib.utils import helper
 from chalicelib.utils import pg_client

@@ -148,7 +148,7 @@ async def __get_table_of_countries(project_id: int, data: schemas.CardTable, use
     return await __get_table_of_series(project_id=project_id, data=data)


-async ef __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None):
+async def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int = None):
     return await __get_table_of_series(project_id=project_id, data=data)



@@ -495,7 +495,7 @@ async def search_all(project_id, user_id, data: schemas.SearchCardsSchema, inclu
     return rows


-def get_all(project_id, user_id):
+async def get_all(project_id, user_id):
     default_search = schemas.SearchCardsSchema()
     result = rows = await search_all(project_id=project_id, user_id=user_id, data=default_search)
     while len(rows) == default_search.limit:
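
Note: every hunk in this commit applies the same mechanical conversion: `def` becomes `async def`, and each call into a now-asynchronous helper gains an `await`. A minimal sketch of the rule, with hypothetical stand-in names rather than the real chalicelib helpers:

```python
import asyncio

async def fetch_rows(limit: int) -> list:
    # Hypothetical stand-in for search_all(): a helper that does async
    # I/O becomes a coroutine function and must be awaited.
    await asyncio.sleep(0)  # placeholder for real database I/O
    return list(range(limit))

async def get_all() -> list:
    # Once fetch_rows() is async, this caller must itself be declared
    # `async def` and must await the call; otherwise it would receive
    # an un-run coroutine object instead of a list.
    rows = await fetch_rows(limit=3)
    return rows

print(asyncio.run(get_all()))  # [0, 1, 2]
```

The conversion is transitive: making `search_all` async forces `get_all` async, which in turn forces whatever routes call `get_all` to await it.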

@@ -195,7 +195,7 @@ async def get_errors_by_session_id(session_id, project_id):
     return helper.list_to_camel_case(errors)


-def search(text, event_type, project_id, source, key):
+async def search(text, event_type, project_id, source, key):
     if not event_type:
         return {"data": autocomplete.__get_autocomplete_table(text, project_id)}


@@ -76,8 +76,8 @@ def __always_healthy(*_):
     }


-async def __check_be_service(service_name):
-    def fn(*_):
+def __check_be_service(service_name):
+    async def fn(*_):
         fail_response = {
             "health": False,
             "details": {
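
Note: this hunk moves the `async` keyword rather than adding one. `__check_be_service` is a factory that only builds and returns a checker; the I/O happens inside the returned closure `fn`, so `fn` is what must be a coroutine function. A sketch of the corrected shape, with a hypothetical probe in place of the real service call:

```python
import asyncio

def make_checker(service_name: str):
    # Sync factory: building the closure involves no I/O.
    async def fn(*_):
        # The returned checker is the coroutine function; callers run
        # the probe with `await checker()`.
        await asyncio.sleep(0)  # placeholder for an HTTP health probe
        return {"health": True, "details": {"service": service_name}}

    return fn

checker = make_checker("frontend")  # cheap, synchronous construction
print(asyncio.run(checker()))       # awaiting it performs the check
```

With the old placement (an async factory returning a sync `fn`), assembling the health map would itself have required awaiting, while the checks would not have been awaitable at all.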

@@ -157,7 +157,7 @@ async def __check_SSL(*_):


 async def __get_sessions_stats(*_):
-    await with pg_client.cursor() as cur:
+    async with pg_client.cursor() as cur:
         constraints = ["projects.deleted_at IS NULL"]
         query = cur.mogrify(f"""SELECT COALESCE(SUM(sessions_count),0) AS s_c,
                                        COALESCE(SUM(events_count),0) AS e_c
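
Note: `await with` is not valid Python; the compound statement for asynchronous context managers is `async with`, and it is only legal inside an `async def`. A minimal sketch, using a hand-rolled async context manager as a stand-in for `pg_client.cursor()`:

```python
import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def cursor():
    # Hypothetical stand-in for pg_client.cursor(): acquire a cursor
    # asynchronously, yield it, release it on exit.
    await asyncio.sleep(0)  # placeholder for pool acquisition
    try:
        yield "cur"
    finally:
        await asyncio.sleep(0)  # placeholder for release

async def get_sessions_stats():
    async with cursor() as cur:  # `async with`, never `await with`
        return f"queried via {cur}"

print(asyncio.run(get_sessions_stats()))
```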

@@ -213,7 +213,7 @@ async def __process_health(health_map):
             if config(f"SKIP_H_{parent_key.upper()}_{element_key.upper()}", cast=bool, default=False):
                 response[parent_key].pop(element_key)
             else:
-                await response[parent_key][element_key] = health_map[parent_key][element_key]()
+                response[parent_key][element_key] = await health_map[parent_key][element_key]()
         else:
             response[parent_key] = await health_map[parent_key]()
     return response
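
Note: this hunk fixes where `await` sits, not whether it is present. `await` is an expression operator, so `await target = coro()` is a SyntaxError; the coroutine has to be awaited on the right-hand side and its result assigned:

```python
import asyncio

async def probe():
    return {"health": True}

async def process():
    response = {}
    # await response["db"] = probe()   # SyntaxError: cannot assign to await expression
    response["db"] = await probe()     # correct: await the call, store the result
    return response

print(asyncio.run(process()))  # {'db': {'health': True}}
```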

@@ -20,7 +20,7 @@ class GitHubIntegration(integration_base.BaseIntegration):
     def issue_handler(self):
         return self._issue_handler

-    def get_obfuscated(self):
+    async def get_obfuscated(self):
         integration = await self.get()
         if integration is None:
             return None
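
Note: an async method can await sibling coroutine methods on the same object, which is what `get_obfuscated` now does with `self.get()`. A sketch of the shape, with a hypothetical integration class and token field:

```python
import asyncio

class Integration:
    async def get(self):
        await asyncio.sleep(0)  # placeholder for a database lookup
        return {"token": "ghp_secret"}

    async def get_obfuscated(self):
        integration = await self.get()  # await the sibling coroutine
        if integration is None:
            return None
        integration["token"] = "ghp_***"  # hypothetical obfuscation step
        return integration

print(asyncio.run(Integration().get_obfuscated()))
```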

@@ -129,9 +129,9 @@ async def __delete_sessions_by_session_ids(session_ids):
         await cur.execute(query=query)


-def __delete_session_mobs_by_session_ids(session_ids, project_id):
-    sessions_mobs.delete_mobs(session_ids=session_ids, project_id=project_id)
-    sessions_devtool.delete_mobs(session_ids=session_ids, project_id=project_id)
+async def __delete_session_mobs_by_session_ids(session_ids, project_id):
+    await sessions_mobs.delete_mobs(session_ids=session_ids, project_id=project_id)
+    await sessions_devtool.delete_mobs(session_ids=session_ids, project_id=project_id)


 async def get_scheduled_jobs():

@@ -79,7 +79,7 @@ def index_to_colname(index):
    return f"metadata_{index}"


-def __get_available_index(project_id):
+async def __get_available_index(project_id):
    used_indexs = await get(project_id)
    used_indexs = [i["index"] for i in used_indexs]
    if len(used_indexs) >= MAX_INDEXES:

@@ -144,7 +144,7 @@ async def delete(tenant_id, project_id, index: int):


 async def add(tenant_id, project_id, new_name):
-    index = __get_available_index(project_id=project_id)
+    index = await __get_available_index(project_id=project_id)
     if index < 1:
         return {"errors": ["maximum allowed metadata reached"]}
     if __exists_by_name(project_id=project_id, name=new_name, exclude_index=None):
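
Note: the missing `await` on `__get_available_index` was a real bug, not a style issue. Without it, `index` is bound to a coroutine object, the coroutine never runs, and the very next line, `index < 1`, raises TypeError. A minimal reproduction with a hypothetical helper:

```python
import asyncio

async def get_available_index() -> int:
    return 5

async def add_without_await():
    index = get_available_index()  # forgot await: `index` is a coroutine object
    return index < 1               # TypeError: '<' not supported between
                                   # instances of 'coroutine' and 'int'

async def add_with_await():
    index = await get_available_index()
    return index < 1               # False, as intended

print(asyncio.run(add_with_await()))
# asyncio.run(add_without_await()) would raise the TypeError and emit
# RuntimeWarning: coroutine 'get_available_index' was never awaited.
```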

@@ -1962,7 +1962,7 @@ async def resource_type_vs_response_end(project_id, startTimestamp=TimeUTC.now(d
    return helper.list_to_camel_case(__merge_charts(response_end, actions))


-def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+async def get_impacted_sessions_by_js_errors(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                        endTimestamp=TimeUTC.now(), density=7, **args):
     step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query = await __get_constraints(project_id=project_id, data=args)

@@ -2093,7 +2093,7 @@ async def get_resources_vs_visually_complete(project_id, startTimestamp=TimeUTC.
    return helper.list_to_camel_case(rows)


-def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+async def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                 endTimestamp=TimeUTC.now(), density=7, **args):
     step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query_subset = await __get_constraints(project_id=project_id, time_constraint=True, chart=False, data=args)

@@ -2129,7 +2129,7 @@ def get_resources_count_by_type(project_id, startTimestamp=TimeUTC.now(delta_day
    return rows


-def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+async def get_resources_by_party(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                            endTimestamp=TimeUTC.now(), density=7, **args):
     step_size = __get_step_size(startTimestamp, endTimestamp, density, factor=1)
     pg_sub_query_subset = await __get_constraints(project_id=project_id, time_constraint=True,

@@ -2198,7 +2198,7 @@ async def __get_application_activity_avg_image_load_time(cur, project_id, startT
    return row


-def get_application_activity_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
+async def get_application_activity_avg_image_load_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                                                  endTimestamp=TimeUTC.now(), **args):
     async with pg_client.cursor() as cur:
         row = await __get_application_activity_avg_image_load_time(cur, project_id, startTimestamp, endTimestamp, **args)
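
Note the division of labor in this last metrics hunk: the public wrapper owns the cursor's lifetime via `async with` and passes the open cursor into the private `__get_application_activity_*` helper, so several such helpers can share one cursor per request. A hedged sketch of that structure, with hypothetical names:

```python
import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def cursor():
    yield "cur"  # stand-in for pg_client.cursor()

async def _avg_image_load_time(cur, project_id):
    # Private helper: receives an already-open cursor and runs one query.
    await asyncio.sleep(0)  # placeholder for cur.execute(...)
    return {"project": project_id, "avgImageLoadTime": 42}

async def avg_image_load_time(project_id):
    # Public wrapper: opens the cursor once, delegates the query work.
    async with cursor() as cur:
        return await _avg_image_load_time(cur, project_id)

print(asyncio.run(avg_image_load_time(1)))
```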

@@ -215,7 +215,7 @@ async def get_by_project_key(project_key):


 async def get_project_key(project_id):
-    async async with pg_client.cursor() as cur:
+    async with pg_client.cursor() as cur:
         query = cur.mogrify("""SELECT project_key
                                FROM public.projects
                                WHERE project_id =%(project_id)s

@@ -1,4 +1,4 @@
-!from decouple import config
+from decouple import config

 from chalicelib.utils.storage import StorageClient


@@ -26,7 +26,7 @@ async def get_urls(session_id, project_id, check_existence: bool = True):
    return results


-def delete_mobs(project_id, session_ids):
+async def delete_mobs(project_id, session_ids):
     for session_id in session_ids:
         for k in __get_devtools_keys(project_id=project_id, session_id=session_id):
             await StorageClient.tag_for_deletion(bucket=config("sessions_bucket"), key=k)
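
Note: `delete_mobs` awaits each `tag_for_deletion` call in turn, which keeps the code simple but serializes the storage round trips. If ordering does not matter, the same work could be fanned out with `asyncio.gather`; this is a hypothetical variant, not what the commit does:

```python
import asyncio

async def tag_for_deletion(key: str) -> str:
    await asyncio.sleep(0)  # placeholder for the storage API call
    return key

async def delete_mobs(keys: list[str]) -> list[str]:
    # Launch all deletions concurrently instead of one await per iteration.
    return await asyncio.gather(*(tag_for_deletion(k) for k in keys))

print(asyncio.run(delete_mobs(["a/1", "a/2", "a/3"])))
```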

@@ -131,7 +131,7 @@ async def get_replay(project_id, session_id, context: schemas.CurrentContext, fu
                                                 check_existence=False)
     data['canvasURL'] = await canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
     if await user_testing.has_test_signals(session_id=session_id, project_id=project_id):
-        data['utxVideo'] = user_await testing.get_ux_webcam_signed_url(session_id=session_id,
+        data['utxVideo'] = await user_testing.get_ux_webcam_signed_url(session_id=session_id,
                                                                        project_id=project_id,
                                                                        check_existence=False)
     else:

@@ -7,7 +7,7 @@ from chalicelib.core import sourcemaps_parser
 from chalicelib.utils.storage import StorageClient, generators


-def presign_share_urls(project_id, urls):
+async def presign_share_urls(project_id, urls):
     results = []
     for u in urls:
         results.append(await StorageClient.get_presigned_url_for_sharing(bucket=config('sourcemaps_bucket'), expires_in=120,

@@ -16,7 +16,7 @@ def presign_share_urls(project_id, urls):
    return results


-def presign_upload_urls(project_id, urls):
+async def presign_upload_urls(project_id, urls):
     results = []
     for u in urls:
         results.append(await StorageClient.get_presigned_url_for_upload(bucket=config('sourcemaps_bucket'),

@@ -633,7 +633,7 @@ async def logout(user_id: int):
        await cur.execute(query)


-def refresh(user_id: int, tenant_id: int = -1) -> dict:
+async def refresh(user_id: int, tenant_id: int = -1) -> dict:
     jwt_iat, jwt_r_jti, jwt_r_iat = await refresh_jwt_iat_jti(user_id=user_id)
     return {
         "jwt": authorizers.generate_jwt(user_id=user_id, tenant_id=tenant_id, iat=jwt_iat,
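
Note: `refresh` also shows that not every call inside an `async def` needs an `await`. The database-backed `refresh_jwt_iat_jti` is awaited, while `authorizers.generate_jwt` stays a plain synchronous call, since signing a token is pure computation rather than I/O. A sketch with hypothetical JWT details:

```python
import asyncio

async def refresh_jwt_iat(user_id: int) -> int:
    await asyncio.sleep(0)  # placeholder for the UPDATE ... RETURNING query
    return 1_700_000_000

def generate_jwt(user_id: int, iat: int) -> str:
    return f"jwt-{user_id}-{iat}"  # sync: pure computation, nothing to await

async def refresh(user_id: int) -> dict:
    jwt_iat = await refresh_jwt_iat(user_id=user_id)  # async: hits the DB
    return {"jwt": generate_jwt(user_id=user_id, iat=jwt_iat)}

print(asyncio.run(refresh(7)))
```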

@@ -201,22 +201,22 @@ def get_errors_by_session_id(session_id, project_id):
     return helper.list_to_camel_case(errors)


-def search(text, event_type, project_id, source, key):
+async def search(text, event_type, project_id, source, key):
     if not event_type:
         return {"data": autocomplete.__get_autocomplete_table(text, project_id)}

     if event_type in SUPPORTED_TYPES.keys():
-        rows = SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
+        rows = await SUPPORTED_TYPES[event_type].get(project_id=project_id, value=text, key=key, source=source)
         # for IOS events autocomplete
         # if event_type + "_IOS" in SUPPORTED_TYPES.keys():
         #     rows += SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key,source=source)
     elif event_type + "_IOS" in SUPPORTED_TYPES.keys():
-        rows = SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
+        rows = await SUPPORTED_TYPES[event_type + "_IOS"].get(project_id=project_id, value=text, key=key, source=source)
     elif event_type in sessions_metas.SUPPORTED_TYPES.keys():
-        return sessions_metas.search(text, event_type, project_id)
+        return await sessions_metas.search(text, event_type, project_id)
     elif event_type.endswith("_IOS") \
             and event_type[:-len("_IOS")] in sessions_metas.SUPPORTED_TYPES.keys():
-        return sessions_metas.search(text, event_type, project_id)
+        return await sessions_metas.search(text, event_type, project_id)
     else:
         return {"errors": ["unsupported event"]}

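
Note: in this autocomplete hunk, `SUPPORTED_TYPES` is a dispatch table whose entries now expose coroutine `get` functions, so the result of the dict lookup must still be called and then awaited. A minimal sketch of awaiting through a dict dispatch, with hypothetical event handlers:

```python
import asyncio

async def get_click_events(value: str) -> list[str]:
    return [f"click:{value}"]

async def get_input_events(value: str) -> list[str]:
    return [f"input:{value}"]

# Values are coroutine functions: the lookup yields a callable whose
# call expression must still be awaited.
SUPPORTED_TYPES = {"CLICK": get_click_events, "INPUT": get_input_events}

async def search(event_type: str, text: str) -> dict:
    if event_type not in SUPPORTED_TYPES:
        return {"errors": ["unsupported event"]}
    rows = await SUPPORTED_TYPES[event_type](text)
    return {"data": rows}

print(asyncio.run(search("CLICK", "buy")))
```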

@@ -71,11 +71,11 @@ SUPPORTED_TYPES = {
 }


-def search(text: str, meta_type: schemas.FilterType, project_id: int):
+async def search(text: str, meta_type: schemas.FilterType, project_id: int):
     rows = []
     if meta_type not in list(SUPPORTED_TYPES.keys()):
         return {"errors": ["unsupported type"]}
-    rows += SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
+    rows += await SUPPORTED_TYPES[meta_type].get(project_id=project_id, text=text)
     # for IOS events autocomplete
     # if meta_type + "_IOS" in list(SUPPORTED_TYPES.keys()):
     #     rows += SUPPORTED_TYPES[meta_type + "_IOS"].get(project_id=project_id, text=text)

@@ -74,7 +74,7 @@ def get_by_id2_pg(project_id, session_id, context: schemas.CurrentContext, full_
         data['domURL'] = sessions_mobs.get_urls(session_id=session_id, project_id=project_id,
                                                 check_existence=False)
         data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
-        data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
+        data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                         context=context, check_existence=False)
         data['resources'] = resources.get_by_session_id(session_id=session_id, project_id=project_id,
                                                         start_ts=data["startTs"], duration=data["duration"])

@@ -135,7 +135,7 @@ def get_replay(project_id, session_id, context: schemas.CurrentContext, full_dat
                                                 check_existence=False)
     else:
         data['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session_id, check_existence=False)
-        data['devtoolsURL'] = sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
+        data['devtoolsURL'] = await sessions_devtool.get_urls(session_id=session_id, project_id=project_id,
                                                         context=context, check_existence=False)
     data['canvasURL'] = canvas.get_canvas_presigned_urls(session_id=session_id, project_id=project_id)
     if user_testing.has_test_signals(session_id=session_id, project_id=project_id):