diff --git a/.github/workflows/patch-build.yaml b/.github/workflows/patch-build.yaml index 0e151c51f..c6ef462c1 100644 --- a/.github/workflows/patch-build.yaml +++ b/.github/workflows/patch-build.yaml @@ -146,18 +146,8 @@ jobs: destination_branch: "main" pr_title: "Updated patch build from main ${{ env.HEAD_COMMIT_ID }}" pr_body: | - This PR updates the Helm chart version after building the patch from ${{ env.HEAD_COMMIT_ID }}. - - - name: Set Remote with GITHUB_TOKEN - run: | - git config --unset http.https://github.com/.extraheader - git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git - - name: Push ${{ secrets.ACTIONS_COMMMIT_TOKEN }} branch to tag - run: | - git fetch --tags - git checkout main - echo git push origin $BRANCH_NAME:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force - git push origin $BRANCH_NAME:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force + This PR updates the Helm chart version after building the patch from ${{ env.HEAD_COMMIT_ID }}. + Once this PR is merged, the tag update job will run automatically. # - name: Debug Job # if: ${{ failure() }} diff --git a/.github/workflows/update-tag.yaml b/.github/workflows/update-tag.yaml index 7016d7019..edbeaa4e4 100644 --- a/.github/workflows/update-tag.yaml +++ b/.github/workflows/update-tag.yaml @@ -1,35 +1,43 @@ on: - workflow_dispatch: - description: "This workflow will build for patches for latest tag, and will Always use commit from main branch." - inputs: - services: - description: "This action will update the latest tag with current main branch HEAD. Should I proceed ? 
true/false" - required: true - default: "false" - -name: Force Push tag with main branch HEAD + pull_request: + types: [closed] + branches: + - main +name: Release tag update --force jobs: deploy: name: Build Patch from main runs-on: ubuntu-latest - env: - DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }} - DEPOT_PROJECT_ID: ${{ secrets.DEPOT_PROJECT_ID }} + if: ${{ (github.event_name == 'pull_request' && github.event.pull_request.merged == true) || github.event.inputs.services == 'true' }} steps: - name: Checkout uses: actions/checkout@v2 + + - name: Get latest release tag using GitHub API + id: get-latest-tag + run: | + LATEST_TAG=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \ + "https://api.github.com/repos/${{ github.repository }}/releases/latest" \ + | jq -r .tag_name) + + # Fail the job if the API doesn't return a latest release tag + if [ "$LATEST_TAG" == "null" ] || [ -z "$LATEST_TAG" ]; then + echo "Latest release tag not found" + exit 100 + fi + + echo "LATEST_TAG=$LATEST_TAG" >> $GITHUB_ENV + echo "Latest tag: $LATEST_TAG" + - name: Set Remote with GITHUB_TOKEN run: | git config --unset http.https://github.com/.extraheader git remote set-url origin https://x-access-token:${{ secrets.ACTIONS_COMMMIT_TOKEN }}@github.com/${{ github.repository }}.git + - name: Push main branch to tag run: | git fetch --tags git checkout main - git push origin HEAD:refs/tags/$(git tag --list 'v[0-9]*' --sort=-v:refname | head -n 1) --force - # - name: Debug Job - # if: ${{ failure() }} - # uses: mxschmitt/action-tmate@v3 - # with: - # limit-access-to-actor: true + echo "Updating tag ${{ env.LATEST_TAG }} to point to latest commit on main" + git push origin HEAD:refs/tags/${{ env.LATEST_TAG }} --force diff --git a/api/chalicelib/core/autocomplete/__init__.py b/api/chalicelib/core/autocomplete/__init__.py index e69de29bb..0186c2f29 100644 --- a/api/chalicelib/core/autocomplete/__init__.py +++ b/api/chalicelib/core/autocomplete/__init__.py @@ -0,0 +1,11 @@ +import logging + +from 
decouple import config + +logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) + +if config("EXP_AUTOCOMPLETE", cast=bool, default=False): + logging.info(">>> Using experimental autocomplete") + from . import autocomplete_ch as autocomplete +else: + from . import autocomplete diff --git a/api/chalicelib/core/autocomplete/autocomplete.py b/api/chalicelib/core/autocomplete/autocomplete.py index aa40ccbd0..64521e087 100644 --- a/api/chalicelib/core/autocomplete/autocomplete.py +++ b/api/chalicelib/core/autocomplete/autocomplete.py @@ -85,7 +85,8 @@ def __generic_query(typename, value_length=None): ORDER BY value""" if value_length is None or value_length > 2: - return f"""(SELECT DISTINCT value, type + return f"""SELECT DISTINCT ON(value,type) value, type + FROM ((SELECT DISTINCT value, type FROM {TABLE} WHERE project_id = %(project_id)s @@ -101,7 +102,7 @@ def __generic_query(typename, value_length=None): AND type='{typename.upper()}' AND value ILIKE %(value)s ORDER BY value - LIMIT 5);""" + LIMIT 5)) AS raw;""" return f"""SELECT DISTINCT value, type FROM {TABLE} WHERE @@ -326,7 +327,7 @@ def __search_metadata(project_id, value, key=None, source=None): AND {colname} ILIKE %(svalue)s LIMIT 5)""") with pg_client.PostgresClient() as cur: cur.execute(cur.mogrify(f"""\ - SELECT key, value, 'METADATA' AS TYPE + SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE FROM({" UNION ALL ".join(sub_from)}) AS all_metas LIMIT 5;""", {"project_id": project_id, "value": helper.string_to_sql_like(value), "svalue": helper.string_to_sql_like("^" + value)})) diff --git a/ee/api/chalicelib/core/autocomplete/autocomplete_ch.py b/api/chalicelib/core/autocomplete/autocomplete_ch.py similarity index 98% rename from ee/api/chalicelib/core/autocomplete/autocomplete_ch.py rename to api/chalicelib/core/autocomplete/autocomplete_ch.py index 2eed9d5bf..7118e91e6 100644 --- a/ee/api/chalicelib/core/autocomplete/autocomplete_ch.py +++ 
b/api/chalicelib/core/autocomplete/autocomplete_ch.py @@ -86,7 +86,8 @@ def __generic_query(typename, value_length=None): ORDER BY value""" if value_length is None or value_length > 2: - return f"""(SELECT DISTINCT value, type + return f"""SELECT DISTINCT ON(value, type) value, type + FROM ((SELECT DISTINCT value, type FROM {TABLE} WHERE project_id = %(project_id)s @@ -102,7 +103,7 @@ def __generic_query(typename, value_length=None): AND type='{typename.upper()}' AND value ILIKE %(value)s ORDER BY value - LIMIT 5);""" + LIMIT 5)) AS raw;""" return f"""SELECT DISTINCT value, type FROM {TABLE} WHERE @@ -257,7 +258,7 @@ def __search_metadata(project_id, value, key=None, source=None): WHERE project_id = %(project_id)s AND {colname} ILIKE %(svalue)s LIMIT 5)""") with ch_client.ClickHouseClient() as cur: - query = cur.format(query=f"""SELECT key, value, 'METADATA' AS TYPE + query = cur.format(query=f"""SELECT DISTINCT ON(key, value) key, value, 'METADATA' AS TYPE FROM({" UNION ALL ".join(sub_from)}) AS all_metas LIMIT 5;""", parameters={"project_id": project_id, "value": helper.string_to_sql_like(value), "svalue": helper.string_to_sql_like("^" + value)}) diff --git a/api/chalicelib/core/errors/errors_ch.py b/api/chalicelib/core/errors/errors_ch.py index f0845110b..1c5e6c13e 100644 --- a/api/chalicelib/core/errors/errors_ch.py +++ b/api/chalicelib/core/errors/errors_ch.py @@ -1,3 +1,5 @@ +import logging + import schemas from chalicelib.core import metadata from chalicelib.core.errors import errors_legacy @@ -7,6 +9,8 @@ from chalicelib.utils import ch_client, exp_ch_helper from chalicelib.utils import helper, metrics_helper from chalicelib.utils.TimeUTC import TimeUTC +logger = logging.getLogger(__name__) + def _multiple_values(values, value_key="value"): query_values = {} @@ -378,9 +382,9 @@ def search(data: schemas.SearchErrorsSchema, project: schemas.ProjectContext, us ORDER BY timestamp) AS sub_table GROUP BY error_id) AS chart_details ON 
details.error_id=chart_details.error_id;""" - # print("------------") - # print(ch.format(main_ch_query, params)) - # print("------------") + logger.debug("------------") + logger.debug(ch.format(main_ch_query, params)) + logger.debug("------------") query = ch.format(query=main_ch_query, parameters=params) rows = ch.execute(query=query) diff --git a/api/chalicelib/core/metrics/custom_metrics.py b/api/chalicelib/core/metrics/custom_metrics.py index 8c02d2be3..c73bd282f 100644 --- a/api/chalicelib/core/metrics/custom_metrics.py +++ b/api/chalicelib/core/metrics/custom_metrics.py @@ -241,14 +241,13 @@ def create_card(project: schemas.ProjectContext, user_id, data: schemas.CardSche params["card_info"] = json.dumps(params["card_info"]) query = """INSERT INTO metrics (project_id, user_id, name, is_public, - view_type, metric_type, metric_of, metric_value, - metric_format, default_config, thumbnail, data, - card_info) - VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, - %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, - %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s, - %(card_info)s) - RETURNING metric_id""" + view_type, metric_type, metric_of, metric_value, + metric_format, default_config, thumbnail, data, + card_info) + VALUES (%(project_id)s, %(user_id)s, %(name)s, %(is_public)s, + %(view_type)s, %(metric_type)s, %(metric_of)s, %(metric_value)s, + %(metric_format)s, %(default_config)s, %(thumbnail)s, %(session_data)s, + %(card_info)s) RETURNING metric_id""" if len(data.series) > 0: query = f"""WITH m AS ({query}) INSERT INTO metric_series(metric_id, index, name, filter) @@ -525,13 +524,13 @@ def get_all(project_id, user_id): def delete_card(project_id, metric_id, user_id): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("""\ - UPDATE public.metrics - SET deleted_at = timezone('utc'::text, now()), edited_at = timezone('utc'::text, now()) - WHERE project_id = %(project_id)s - AND metric_id = 
%(metric_id)s - AND (user_id = %(user_id)s OR is_public) - RETURNING data;""", + cur.mogrify(""" \ + UPDATE public.metrics + SET deleted_at = timezone('utc'::text, now()), + edited_at = timezone('utc'::text, now()) + WHERE project_id = %(project_id)s + AND metric_id = %(metric_id)s + AND (user_id = %(user_id)s OR is_public) RETURNING data;""", {"metric_id": metric_id, "project_id": project_id, "user_id": user_id}) ) @@ -615,13 +614,14 @@ def get_series_for_alert(project_id, user_id): FALSE AS predefined, metric_id, series_id - FROM metric_series - INNER JOIN metrics USING (metric_id) - WHERE metrics.deleted_at ISNULL - AND metrics.project_id = %(project_id)s - AND metrics.metric_type = 'timeseries' - AND (user_id = %(user_id)s OR is_public) - ORDER BY name;""", + FROM metric_series + INNER JOIN metrics USING (metric_id) + WHERE metrics.deleted_at ISNULL + AND metrics.project_id = %(project_id)s + AND metrics.metric_type = 'timeseries' + AND (user_id = %(user_id)s + OR is_public) + ORDER BY name;""", {"project_id": project_id, "user_id": user_id} ) ) @@ -632,11 +632,11 @@ def get_series_for_alert(project_id, user_id): def change_state(project_id, metric_id, user_id, status): with pg_client.PostgresClient() as cur: cur.execute( - cur.mogrify("""\ - UPDATE public.metrics - SET active = %(status)s - WHERE metric_id = %(metric_id)s - AND (user_id = %(user_id)s OR is_public);""", + cur.mogrify(""" \ + UPDATE public.metrics + SET active = %(status)s + WHERE metric_id = %(metric_id)s + AND (user_id = %(user_id)s OR is_public);""", {"metric_id": metric_id, "status": status, "user_id": user_id}) ) return get_card(metric_id=metric_id, project_id=project_id, user_id=user_id) @@ -674,7 +674,8 @@ def get_funnel_sessions_by_issue(user_id, project_id, metric_id, issue_id, "issue": issue} -def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, data: schemas.CardSessionsSchema): +def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, + 
data: schemas.CardSessionsSchema, for_dashboard: bool = False): raw_metric: dict = get_card(metric_id=metric_id, project_id=project.project_id, user_id=user_id, include_data=True) if raw_metric is None: @@ -693,7 +694,8 @@ def make_chart_from_card(project: schemas.ProjectContext, user_id, metric_id, da return heatmaps.search_short_session(project_id=project.project_id, data=schemas.HeatMapSessionsSearch(**metric.model_dump()), user_id=user_id) - + elif metric.metric_type == schemas.MetricType.PATH_ANALYSIS and for_dashboard: + metric.hide_excess = True return get_chart(project=project, data=metric, user_id=user_id) diff --git a/api/chalicelib/core/product_analytics/autocomplete.py b/api/chalicelib/core/product_analytics/autocomplete.py index 5915a8ab6..fafcc72d6 100644 --- a/api/chalicelib/core/product_analytics/autocomplete.py +++ b/api/chalicelib/core/product_analytics/autocomplete.py @@ -30,21 +30,23 @@ def search_properties(project_id: int, property_name: Optional[str] = None, even with ClickHouseClient() as ch_client: select = "value" full_args = {"project_id": project_id, "limit": 20, - "event_name": event_name, "property_name": property_name} + "event_name": event_name, "property_name": property_name, "q": q, + "property_name_l": helper.string_to_sql_like(property_name), + "q_l": helper.string_to_sql_like(q)} constraints = ["project_id = %(project_id)s", "_timestamp >= now()-INTERVAL 1 MONTH"] if event_name: constraints += ["event_name = %(event_name)s"] + if property_name and q: constraints += ["property_name = %(property_name)s"] elif property_name: select = "DISTINCT ON(property_name) property_name AS value" - constraints += ["property_name ILIKE %(property_name)s"] - full_args["property_name"] = helper.string_to_sql_like(property_name) + constraints += ["property_name ILIKE %(property_name_l)s"] + if q: - constraints += ["value ILIKE %(q)s"] - full_args["q"] = helper.string_to_sql_like(q) + constraints += ["value ILIKE %(q_l)s"] query = ch_client.format( 
f"""SELECT {select},data_count FROM product_analytics.autocomplete_event_properties_grouped diff --git a/api/chalicelib/core/product_analytics/events.py b/api/chalicelib/core/product_analytics/events.py index 10e578c7d..f7b4cea86 100644 --- a/api/chalicelib/core/product_analytics/events.py +++ b/api/chalicelib/core/product_analytics/events.py @@ -4,7 +4,7 @@ import schemas from chalicelib.utils import helper from chalicelib.utils import sql_helper as sh from chalicelib.utils.ch_client import ClickHouseClient -from chalicelib.utils.exp_ch_helper import get_sub_condition +from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast logger = logging.getLogger(__name__) PREDEFINED_EVENTS = { @@ -111,11 +111,13 @@ def search_events(project_id: int, data: schemas.EventsSearchPayloadSchema): sub_conditions = [] for j, ef in enumerate(f.properties.filters): p_k = f"e_{i}_p_{j}" - full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k)} + full_args = {**full_args, **sh.multi_values(ef.value, value_key=p_k, data_type=ef.data_type)} + cast = get_col_cast(data_type=ef.data_type, value=ef.value) if ef.is_predefined: - sub_condition = get_sub_condition(col_name=ef.name, val_name=p_k, operator=ef.operator) + sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(`{ef.name}`,'{cast}')", + val_name=p_k, operator=ef.operator) else: - sub_condition = get_sub_condition(col_name=f"properties.{ef.name}", + sub_condition = get_sub_condition(col_name=f"accurateCastOrNull(properties.`{ef.name}`,'{cast}')", val_name=p_k, operator=ef.operator) sub_conditions.append(sh.multi_conditions(sub_condition, ef.value, value_key=p_k)) if len(sub_conditions) > 0: diff --git a/api/chalicelib/core/sessions/sessions_ch.py b/api/chalicelib/core/sessions/sessions_ch.py index f9b5f7ea5..8d1929c70 100644 --- a/api/chalicelib/core/sessions/sessions_ch.py +++ b/api/chalicelib/core/sessions/sessions_ch.py @@ -6,7 +6,7 @@ from chalicelib.core import events, metadata from . 
import performance_event, sessions_legacy from chalicelib.utils import pg_client, helper, metrics_helper, ch_client, exp_ch_helper from chalicelib.utils import sql_helper as sh -from chalicelib.utils.exp_ch_helper import get_sub_condition +from chalicelib.utils.exp_ch_helper import get_sub_condition, get_col_cast logger = logging.getLogger(__name__) @@ -1264,14 +1264,15 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu for l, property in enumerate(event.properties.filters): a_k = f"{e_k}_att_{l}" full_args = {**full_args, - **sh.multi_values(property.value, value_key=a_k)} - + **sh.multi_values(property.value, value_key=a_k, data_type=property.data_type)} + cast = get_col_cast(data_type=property.data_type, value=property.value) if property.is_predefined: - condition = get_sub_condition(col_name=f"main.{property.name}", + condition = get_sub_condition(col_name=f"accurateCastOrNull(main.`{property.name}`,'{cast}')", val_name=a_k, operator=property.operator) else: - condition = get_sub_condition(col_name=f"main.properties.{property.name}", - val_name=a_k, operator=property.operator) + condition = get_sub_condition( + col_name=f"accurateCastOrNull(main.properties.`{property.name}`,'{cast}')", + val_name=a_k, operator=property.operator) event_where.append( sh.multi_conditions(condition, property.value, value_key=a_k) ) @@ -1505,7 +1506,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu if c_f.type == schemas.FetchFilterType.FETCH_URL.value: _extra_or_condition.append( sh.multi_conditions(f"extra_event.url_path {op} %({e_k})s", - c_f.value, value_key=e_k)) + c_f.value, value_key=e_k)) else: logging.warning(f"unsupported extra_event type:${c.type}") if len(_extra_or_condition) > 0: @@ -1577,18 +1578,15 @@ def get_user_sessions(project_id, user_id, start_date, end_date): def get_session_user(project_id, user_id): with pg_client.PostgresClient() as cur: query = cur.mogrify( - """\ - SELECT - user_id, - 
count(*) as session_count, - max(start_ts) as last_seen, - min(start_ts) as first_seen - FROM - "public".sessions - WHERE - project_id = %(project_id)s - AND user_id = %(userId)s - AND duration is not null + """ \ + SELECT user_id, + count(*) as session_count, + max(start_ts) as last_seen, + min(start_ts) as first_seen + FROM "public".sessions + WHERE project_id = %(project_id)s + AND user_id = %(userId)s + AND duration is not null GROUP BY user_id; """, {"project_id": project_id, "userId": user_id} diff --git a/api/chalicelib/utils/exp_ch_helper.py b/api/chalicelib/utils/exp_ch_helper.py index babef4d57..aaf41afb2 100644 --- a/api/chalicelib/utils/exp_ch_helper.py +++ b/api/chalicelib/utils/exp_ch_helper.py @@ -1,10 +1,13 @@ import logging import re -from typing import Union +from typing import Union, Any import schemas from chalicelib.utils import sql_helper as sh from schemas import SearchEventOperator +import math +import struct +from decimal import Decimal logger = logging.getLogger(__name__) @@ -158,8 +161,73 @@ def simplify_clickhouse_types(ch_types: list[str]) -> list[str]: def get_sub_condition(col_name: str, val_name: str, - operator: Union[schemas.SearchEventOperator, schemas.MathOperator]): + operator: Union[schemas.SearchEventOperator, schemas.MathOperator]) -> str: if operator == SearchEventOperator.PATTERN: return f"match({col_name}, %({val_name})s)" op = sh.get_sql_operator(operator) return f"{col_name} {op} %({val_name})s" + + +def get_col_cast(data_type: schemas.PropertyType, value: Any) -> str: + if value is None or len(value) == 0: + return "" + if isinstance(value, list): + value = value[0] + if data_type in (schemas.PropertyType.INT, schemas.PropertyType.FLOAT): + return best_clickhouse_type(value) + return "DateTime" if data_type == schemas.PropertyType.DATETIME else data_type.capitalize() + + +# (type_name, minimum, maximum) – ordered by increasing size +_INT_RANGES = [ + ("Int8", -128, 127), + ("UInt8", 0, 255), + ("Int16", -32_768, 32_767), + ("UInt16", 0, 65_535), + ("Int32", -2_147_483_648, 
2_147_483_647), + ("UInt32", 0, 4_294_967_295), + ("Int64", -9_223_372_036_854_775_808, 9_223_372_036_854_775_807), + ("UInt64", 0, 18_446_744_073_709_551_615), +] + + +def best_clickhouse_type(value): + """ + Return the most compact ClickHouse numeric type that can store *value* loss-lessly. + + """ + # Treat bool like tiny int + if isinstance(value, bool): + value = int(value) + + # --- Integers --- + if isinstance(value, int): + for name, lo, hi in _INT_RANGES: + if lo <= value <= hi: + return name + # Beyond UInt64: ClickHouse offers Int128 / Int256 or Decimal + return "Int128" + + # --- Decimal.Decimal (exact) --- + if isinstance(value, Decimal): + # ClickHouse Decimal32/64/128 have 9 / 18 / 38 significant digits. + digits = len(value.as_tuple().digits) + if digits <= 9: + return "Decimal32" + elif digits <= 18: + return "Decimal64" + else: + return "Decimal128" + + # --- Floats --- + if isinstance(value, float): + if not math.isfinite(value): + return "Float64" # inf / nan → always Float64 + + # Check if a round-trip through 32-bit float preserves the bit pattern + packed = struct.pack("f", value) + if struct.unpack("f", packed)[0] == value: + return "Float32" + return "Float64" + + raise TypeError(f"Unsupported type: {type(value).__name__}") diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 4d0d09427..0c128d4a1 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -99,6 +99,8 @@ def allow_captcha(): def string_to_sql_like(value): + if value is None: + return None value = re.sub(' +', ' ', value) value = value.replace("*", "%") if value.startswith("^"): @@ -334,5 +336,3 @@ def cast_session_id_to_string(data): for key in keys: data[key] = cast_session_id_to_string(data[key]) return data - - diff --git a/api/chalicelib/utils/sql_helper.py b/api/chalicelib/utils/sql_helper.py index 521050634..7290ff638 100644 --- a/api/chalicelib/utils/sql_helper.py +++ b/api/chalicelib/utils/sql_helper.py @@ -52,12 
+52,16 @@ def multi_conditions(condition, values, value_key="value", is_not=False): return "(" + (" AND " if is_not else " OR ").join(query) + ")" -def multi_values(values, value_key="value"): +def multi_values(values, value_key="value", data_type: schemas.PropertyType | None = None): query_values = {} if values is not None and isinstance(values, list): for i in range(len(values)): k = f"{value_key}_{i}" query_values[k] = values[i].value if isinstance(values[i], Enum) else values[i] + if data_type: + if data_type == schemas.PropertyType.STRING: + query_values[k] = str(query_values[k]) + return query_values diff --git a/api/routers/subs/metrics.py b/api/routers/subs/metrics.py index d60280c49..082ab9fdf 100644 --- a/api/routers/subs/metrics.py +++ b/api/routers/subs/metrics.py @@ -219,6 +219,17 @@ def get_card_chart(projectId: int, metric_id: int, data: schemas.CardSessionsSch return {"data": data} +@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}/chart", tags=["card"]) +@app.post("/{projectId}/dashboards/{dashboardId}/cards/{metric_id}", tags=["card"]) +def get_card_chart_for_dashboard(projectId: int, dashboardId: int, metric_id: int, + data: schemas.CardSessionsSchema = Body(...), + context: schemas.CurrentContext = Depends(OR_context)): + data = custom_metrics.make_chart_from_card( + project=context.project, user_id=context.user_id, metric_id=metric_id, data=data, for_dashboard=True + ) + return {"data": data} + + @app.post("/{projectId}/cards/{metric_id}", tags=["dashboard"]) def update_card(projectId: int, metric_id: int, data: schemas.CardSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): diff --git a/api/routers/subs/product_analytics.py b/api/routers/subs/product_analytics.py index d7dbcba23..6141af313 100644 --- a/api/routers/subs/product_analytics.py +++ b/api/routers/subs/product_analytics.py @@ -63,8 +63,12 @@ def autocomplete_events(projectId: int, q: Optional[str] = None, 
@app.get('/{projectId}/properties/autocomplete', tags=["autocomplete"]) -def autocomplete_properties(projectId: int, propertyName: str, eventName: Optional[str] = None, +def autocomplete_properties(projectId: int, propertyName: Optional[str] = None, eventName: Optional[str] = None, q: Optional[str] = None, context: schemas.CurrentContext = Depends(OR_context)): + if not propertyName and not eventName and not q: + return {"error": ["Specify eventName to get top properties", + "Specify propertyName to get top values of that property", + "Specify eventName&propertyName to get top values of that property for the selected event"]} return {"data": autocomplete.search_properties(project_id=projectId, event_name=None if not eventName \ or len(eventName) == 0 else eventName, diff --git a/api/schemas/schemas.py b/api/schemas/schemas.py index 0e6842437..973dce8f1 100644 --- a/api/schemas/schemas.py +++ b/api/schemas/schemas.py @@ -3,12 +3,13 @@ from typing import Optional, List, Union, Literal from pydantic import Field, EmailStr, HttpUrl, SecretStr, AnyHttpUrl from pydantic import field_validator, model_validator, computed_field +from pydantic import AfterValidator from pydantic.functional_validators import BeforeValidator from chalicelib.utils.TimeUTC import TimeUTC from .overrides import BaseModel, Enum, ORUnion from .transformers_validators import transform_email, remove_whitespace, remove_duplicate_values, single_to_list, \ - force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric + force_is_event, NAME_PATTERN, int_to_string, check_alphanumeric, check_regex class _GRecaptcha(BaseModel): @@ -537,7 +538,7 @@ class GraphqlFilterType(str, Enum): class RequestGraphqlFilterSchema(BaseModel): type: Union[FetchFilterType, GraphqlFilterType] = Field(...) value: List[Union[int, str]] = Field(...) - operator: Union[SearchEventOperator, MathOperator] = Field(...) + operator: Annotated[Union[SearchEventOperator, MathOperator], AfterValidator(check_regex)] = Field(...) 
@model_validator(mode="before") @classmethod @@ -581,11 +582,23 @@ class EventPredefinedPropertyType(str, Enum): IMPORT = "$import" +class PropertyType(str, Enum): + INT = "int" + FLOAT = "float" + DATETIME = "datetime" + STRING = "string" + ARRAY = "array" + TUPLE = "tuple" + MAP = "map" + NESTED = "nested" + + class PropertyFilterSchema(BaseModel): is_event: Literal[False] = False name: Union[EventPredefinedPropertyType, str] = Field(...) operator: Union[SearchEventOperator, MathOperator] = Field(...) value: List[Union[int, str]] = Field(...) + data_type: PropertyType = Field(default=PropertyType.STRING.value) # property_type: Optional[Literal["string", "number", "date"]] = Field(default=None) @@ -600,6 +613,13 @@ class PropertyFilterSchema(BaseModel): self.name = self.name.value return self + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + class EventPropertiesSchema(BaseModel): operator: Literal["and", "or"] = Field(...) 
@@ -645,6 +665,13 @@ class SessionSearchEventSchema(BaseModel): f"operator:{self.operator} is only available for event-type: {EventType.CLICK}" return self + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + class SessionSearchFilterSchema(BaseModel): is_event: Literal[False] = False @@ -702,6 +729,13 @@ class SessionSearchFilterSchema(BaseModel): return self + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + class _PaginatedSchema(BaseModel): limit: int = Field(default=200, gt=0, le=200) @@ -868,6 +902,13 @@ class PathAnalysisSubFilterSchema(BaseModel): values["isEvent"] = True return values + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + class _ProductAnalyticsFilter(BaseModel): is_event: Literal[False] = False @@ -878,6 +919,13 @@ class _ProductAnalyticsFilter(BaseModel): _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + class _ProductAnalyticsEventFilter(BaseModel): is_event: Literal[True] = True @@ -888,6 +936,13 @@ class _ProductAnalyticsEventFilter(BaseModel): _remove_duplicate_values = field_validator('value', mode='before')(remove_duplicate_values) + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + # this type is created to allow mixing events&filters and specifying a discriminator for PathAnalysis series filter ProductAnalyticsFilter = 
Annotated[Union[_ProductAnalyticsFilter, _ProductAnalyticsEventFilter], @@ -1332,6 +1387,13 @@ class LiveSessionSearchFilterSchema(BaseModel): assert len(self.source) > 0, "source should not be empty for METADATA type" return self + @model_validator(mode='after') + def _check_regex_value(self): + if self.operator == SearchEventOperator.PATTERN: + for v in self.value: + check_regex(v) + return self + class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) diff --git a/api/schemas/transformers_validators.py b/api/schemas/transformers_validators.py index 106f8747c..90d88aead 100644 --- a/api/schemas/transformers_validators.py +++ b/api/schemas/transformers_validators.py @@ -1,3 +1,4 @@ +import re from typing import Union, Any, Type from pydantic import ValidationInfo @@ -57,3 +58,17 @@ def check_alphanumeric(v: str, info: ValidationInfo) -> str: is_alphanumeric = v.replace(' ', '').isalnum() assert is_alphanumeric, f'{info.field_name} must be alphanumeric' return v + + +def check_regex(v: str) -> str: + assert v is not None, "Regex is null" + assert isinstance(v, str), "Regex value must be a string" + assert len(v) > 0, "Regex is empty" + is_valid = None + try: + re.compile(v) + except re.error as exc: + is_valid = f"Invalid regex: {exc} (at position {exc.pos})" + + assert is_valid is None, is_valid + return v diff --git a/backend/internal/db/datasaver/saver.go b/backend/internal/db/datasaver/saver.go index 8309059ff..abbbf50e6 100644 --- a/backend/internal/db/datasaver/saver.go +++ b/backend/internal/db/datasaver/saver.go @@ -2,7 +2,7 @@ package datasaver import ( "context" - + "encoding/json" "openreplay/backend/internal/config/db" "openreplay/backend/pkg/db/clickhouse" "openreplay/backend/pkg/db/postgres" @@ -50,10 +50,6 @@ func New(log logger.Logger, cfg *db.Config, pg *postgres.Conn, ch clickhouse.Con } func (s *saverImpl) Handle(msg Message) { - if msg.TypeID() == MsgCustomEvent { - defer 
s.Handle(types.WrapCustomEvent(msg.(*CustomEvent))) - } - var ( sessCtx = context.WithValue(context.Background(), "sessionID", msg.SessionID()) session *sessions.Session @@ -69,6 +65,23 @@ func (s *saverImpl) Handle(msg Message) { return } + if msg.TypeID() == MsgCustomEvent { + m := msg.(*CustomEvent) + // Try to parse custom event payload to JSON and extract or_payload field + type CustomEventPayload struct { + CustomTimestamp uint64 `json:"or_timestamp"` + } + customPayload := &CustomEventPayload{} + if err := json.Unmarshal([]byte(m.Payload), customPayload); err == nil { + if customPayload.CustomTimestamp >= session.Timestamp { + s.log.Info(sessCtx, "custom event timestamp received: %v", m.Timestamp) + msg.Meta().Timestamp = customPayload.CustomTimestamp + s.log.Info(sessCtx, "custom event timestamp updated: %v", m.Timestamp) + } + } + defer s.Handle(types.WrapCustomEvent(m)) + } + if IsMobileType(msg.TypeID()) { if err := s.handleMobileMessage(sessCtx, session, msg); err != nil { if !postgres.IsPkeyViolation(err) { diff --git a/backend/internal/db/datasaver/web.go b/backend/internal/db/datasaver/web.go index 51fcaed9f..e623b452a 100644 --- a/backend/internal/db/datasaver/web.go +++ b/backend/internal/db/datasaver/web.go @@ -2,7 +2,6 @@ package datasaver import ( "context" - "openreplay/backend/pkg/db/postgres" "openreplay/backend/pkg/db/types" "openreplay/backend/pkg/messages" diff --git a/backend/pkg/db/clickhouse/connector.go b/backend/pkg/db/clickhouse/connector.go index 51630f75c..9a0131488 100644 --- a/backend/pkg/db/clickhouse/connector.go +++ b/backend/pkg/db/clickhouse/connector.go @@ -726,7 +726,6 @@ func (c *connectorImpl) InsertRequest(session *sessions.Session, msg *messages.N func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.CustomEvent) error { jsonString, err := json.Marshal(map[string]interface{}{ - "name": msg.Name, "payload": msg.Payload, "user_device": session.UserDevice, "user_device_type": 
session.UserDeviceType, @@ -740,11 +739,11 @@ func (c *connectorImpl) InsertCustom(session *sessions.Session, msg *messages.Cu session.SessionID, uint16(session.ProjectID), getUUID(msg), - "CUSTOM", + msg.Name, eventTime, eventTime.Unix(), session.UserUUID, - true, + false, session.Platform, session.UserOSVersion, session.UserOS, diff --git a/backend/pkg/messages/messages.go b/backend/pkg/messages/messages.go index 5a03d6cfa..239cc011d 100644 --- a/backend/pkg/messages/messages.go +++ b/backend/pkg/messages/messages.go @@ -1466,7 +1466,7 @@ func (msg *SetNodeAttributeDict) TypeID() int { return 52 } -type ResourceTimingDeprecated struct { +type ResourceTimingDeprecatedDeprecated struct { message Timestamp uint64 Duration uint64 @@ -1478,7 +1478,7 @@ type ResourceTimingDeprecated struct { Initiator string } -func (msg *ResourceTimingDeprecated) Encode() []byte { +func (msg *ResourceTimingDeprecatedDeprecated) Encode() []byte { buf := make([]byte, 81+len(msg.URL)+len(msg.Initiator)) buf[0] = 53 p := 1 @@ -1493,11 +1493,11 @@ func (msg *ResourceTimingDeprecated) Encode() []byte { return buf[:p] } -func (msg *ResourceTimingDeprecated) Decode() Message { +func (msg *ResourceTimingDeprecatedDeprecated) Decode() Message { return msg } -func (msg *ResourceTimingDeprecated) TypeID() int { +func (msg *ResourceTimingDeprecatedDeprecated) TypeID() int { return 53 } @@ -2320,6 +2320,90 @@ func (msg *Incident) TypeID() int { return 85 } +type ResourceTiming struct { + message + Timestamp uint64 + Duration uint64 + TTFB uint64 + HeaderSize uint64 + EncodedBodySize uint64 + DecodedBodySize uint64 + URL string + Initiator string + TransferredSize uint64 + Cached bool + Queueing uint64 + DnsLookup uint64 + InitialConnection uint64 + SSL uint64 + ContentDownload uint64 + Total uint64 + Stalled uint64 +} + +func (msg *ResourceTiming) Encode() []byte { + buf := make([]byte, 171+len(msg.URL)+len(msg.Initiator)) + buf[0] = 85 + p := 1 + p = WriteUint(msg.Timestamp, buf, p) + p = 
WriteUint(msg.Duration, buf, p) + p = WriteUint(msg.TTFB, buf, p) + p = WriteUint(msg.HeaderSize, buf, p) + p = WriteUint(msg.EncodedBodySize, buf, p) + p = WriteUint(msg.DecodedBodySize, buf, p) + p = WriteString(msg.URL, buf, p) + p = WriteString(msg.Initiator, buf, p) + p = WriteUint(msg.TransferredSize, buf, p) + p = WriteBoolean(msg.Cached, buf, p) + p = WriteUint(msg.Queueing, buf, p) + p = WriteUint(msg.DnsLookup, buf, p) + p = WriteUint(msg.InitialConnection, buf, p) + p = WriteUint(msg.SSL, buf, p) + p = WriteUint(msg.ContentDownload, buf, p) + p = WriteUint(msg.Total, buf, p) + p = WriteUint(msg.Stalled, buf, p) + return buf[:p] +} + +func (msg *ResourceTiming) Decode() Message { + return msg +} + +func (msg *ResourceTiming) TypeID() int { + return 85 +} + +type LongAnimationTask struct { + message + Name string + Duration int64 + BlockingDuration int64 + FirstUIEventTimestamp int64 + StartTime int64 + Scripts string +} + +func (msg *LongAnimationTask) Encode() []byte { + buf := make([]byte, 61+len(msg.Name)+len(msg.Scripts)) + buf[0] = 89 + p := 1 + p = WriteString(msg.Name, buf, p) + p = WriteInt(msg.Duration, buf, p) + p = WriteInt(msg.BlockingDuration, buf, p) + p = WriteInt(msg.FirstUIEventTimestamp, buf, p) + p = WriteInt(msg.StartTime, buf, p) + p = WriteString(msg.Scripts, buf, p) + return buf[:p] +} + +func (msg *LongAnimationTask) Decode() Message { + return msg +} + +func (msg *LongAnimationTask) TypeID() int { + return 89 +} + type InputChange struct { message ID uint64 @@ -2418,7 +2502,7 @@ func (msg *UnbindNodes) TypeID() int { return 115 } -type ResourceTiming struct { +type ResourceTimingDeprecated struct { message Timestamp uint64 Duration uint64 @@ -2432,7 +2516,7 @@ type ResourceTiming struct { Cached bool } -func (msg *ResourceTiming) Encode() []byte { +func (msg *ResourceTimingDeprecated) Encode() []byte { buf := make([]byte, 101+len(msg.URL)+len(msg.Initiator)) buf[0] = 116 p := 1 @@ -2449,11 +2533,11 @@ func (msg *ResourceTiming) 
Encode() []byte { return buf[:p] } -func (msg *ResourceTiming) Decode() Message { +func (msg *ResourceTimingDeprecated) Decode() Message { return msg } -func (msg *ResourceTiming) TypeID() int { +func (msg *ResourceTimingDeprecated) TypeID() int { return 116 } diff --git a/backend/pkg/messages/read-message.go b/backend/pkg/messages/read-message.go index acb169e30..8e2234540 100644 --- a/backend/pkg/messages/read-message.go +++ b/backend/pkg/messages/read-message.go @@ -873,9 +873,9 @@ func DecodeSetNodeAttributeDict(reader BytesReader) (Message, error) { return msg, err } -func DecodeResourceTimingDeprecated(reader BytesReader) (Message, error) { +func DecodeResourceTimingDeprecatedDeprecated(reader BytesReader) (Message, error) { var err error = nil - msg := &ResourceTimingDeprecated{} + msg := &ResourceTimingDeprecatedDeprecated{} if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } @@ -1432,6 +1432,85 @@ func DecodeIncident(reader BytesReader) (Message, error) { return nil, err } return msg, err +func DecodeResourceTiming(reader BytesReader) (Message, error) { + var err error = nil + msg := &ResourceTiming{} + if msg.Timestamp, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Duration, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.TTFB, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.HeaderSize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.EncodedBodySize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.DecodedBodySize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.URL, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Initiator, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.TransferredSize, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Cached, err = reader.ReadBoolean(); err != nil { + return nil, err + } + if msg.Queueing, err = 
reader.ReadUint(); err != nil { + return nil, err + } + if msg.DnsLookup, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.InitialConnection, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.SSL, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.ContentDownload, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Total, err = reader.ReadUint(); err != nil { + return nil, err + } + if msg.Stalled, err = reader.ReadUint(); err != nil { + return nil, err + } + return msg, err +} + +func DecodeLongAnimationTask(reader BytesReader) (Message, error) { + var err error = nil + msg := &LongAnimationTask{} + if msg.Name, err = reader.ReadString(); err != nil { + return nil, err + } + if msg.Duration, err = reader.ReadInt(); err != nil { + return nil, err + } + if msg.BlockingDuration, err = reader.ReadInt(); err != nil { + return nil, err + } + if msg.FirstUIEventTimestamp, err = reader.ReadInt(); err != nil { + return nil, err + } + if msg.StartTime, err = reader.ReadInt(); err != nil { + return nil, err + } + if msg.Scripts, err = reader.ReadString(); err != nil { + return nil, err + } + return msg, err } func DecodeInputChange(reader BytesReader) (Message, error) { @@ -1491,9 +1570,9 @@ func DecodeUnbindNodes(reader BytesReader) (Message, error) { return msg, err } -func DecodeResourceTiming(reader BytesReader) (Message, error) { +func DecodeResourceTimingDeprecated(reader BytesReader) (Message, error) { var err error = nil - msg := &ResourceTiming{} + msg := &ResourceTimingDeprecated{} if msg.Timestamp, err = reader.ReadUint(); err != nil { return nil, err } @@ -2202,7 +2281,7 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) { case 52: return DecodeSetNodeAttributeDict(reader) case 53: - return DecodeResourceTimingDeprecated(reader) + return DecodeResourceTimingDeprecatedDeprecated(reader) case 54: return DecodeConnectionInformation(reader) case 55: @@ -2264,7 +2343,9 @@ func 
ReadMessage(t uint64, reader BytesReader) (Message, error) { case 84: return DecodeWSChannel(reader) case 85: - return DecodeIncident(reader) + return DecodeResourceTiming(reader) + case 89: + return DecodeLongAnimationTask(reader) case 112: return DecodeInputChange(reader) case 113: @@ -2274,7 +2355,7 @@ func ReadMessage(t uint64, reader BytesReader) (Message, error) { case 115: return DecodeUnbindNodes(reader) case 116: - return DecodeResourceTiming(reader) + return DecodeResourceTimingDeprecated(reader) case 117: return DecodeTabChange(reader) case 118: diff --git a/backend/pkg/sessions/api/web/handlers.go b/backend/pkg/sessions/api/web/handlers.go index a8538ba3f..f3530015a 100644 --- a/backend/pkg/sessions/api/web/handlers.go +++ b/backend/pkg/sessions/api/web/handlers.go @@ -154,13 +154,6 @@ func (e *handlersImpl) startSessionHandlerWeb(w http.ResponseWriter, r *http.Req // Add projectID to context r = r.WithContext(context.WithValue(r.Context(), "projectID", fmt.Sprintf("%d", p.ProjectID))) - // Validate tracker version - if err := validateTrackerVersion(req.TrackerVersion); err != nil { - e.log.Error(r.Context(), "unsupported tracker version: %s, err: %s", req.TrackerVersion, err) - e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusUpgradeRequired, errors.New("please upgrade the tracker version"), startTime, r.URL.Path, bodySize) - return - } - // Check if the project supports mobile sessions if !p.IsWeb() { e.responser.ResponseWithError(e.log, r.Context(), w, http.StatusForbidden, errors.New("project doesn't support web sessions"), startTime, r.URL.Path, bodySize) diff --git a/ee/api/.gitignore b/ee/api/.gitignore index 062cde81f..d5392c84d 100644 --- a/ee/api/.gitignore +++ b/ee/api/.gitignore @@ -187,7 +187,7 @@ Pipfile.lock /chalicelib/core/announcements.py /chalicelib/core/assist.py /chalicelib/core/authorizers.py -/chalicelib/core/autocomplete/autocomplete.py +/chalicelib/core/autocomplete /chalicelib/core/boarding.py 
/chalicelib/core/canvas.py /chalicelib/core/collaborations/__init__.py diff --git a/ee/api/chalicelib/core/autocomplete/__init__.py b/ee/api/chalicelib/core/autocomplete/__init__.py deleted file mode 100644 index 0186c2f29..000000000 --- a/ee/api/chalicelib/core/autocomplete/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -import logging - -from decouple import config - -logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) - -if config("EXP_AUTOCOMPLETE", cast=bool, default=False): - logging.info(">>> Using experimental autocomplete") - from . import autocomplete_ch as autocomplete -else: - from . import autocomplete diff --git a/ee/api/chalicelib/utils/exp_ch_helper.py b/ee/api/chalicelib/utils/exp_ch_helper.py index 1c26c1b22..c31707894 100644 --- a/ee/api/chalicelib/utils/exp_ch_helper.py +++ b/ee/api/chalicelib/utils/exp_ch_helper.py @@ -1,9 +1,16 @@ -from typing import Union +import logging +import math +import re +import struct +from decimal import Decimal +from typing import Any, Union + +from decouple import config import schemas +from chalicelib.utils import sql_helper as sh from chalicelib.utils.TimeUTC import TimeUTC -from decouple import config -import logging +from schemas import SearchEventOperator logger = logging.getLogger(__name__) @@ -110,12 +117,13 @@ def simplify_clickhouse_type(ch_type: str) -> str: return "int" # Floats: Float32, Float64 - if re.match(r'^float(32|64)$', normalized_type): + if re.match(r'^float(32|64)|double$', normalized_type): return "float" # Decimal: Decimal(P, S) if normalized_type.startswith("decimal"): - return "decimal" + # return "decimal" + return "float" # Date/DateTime if normalized_type.startswith("date"): @@ -131,11 +139,13 @@ def simplify_clickhouse_type(ch_type: str) -> str: # UUID if normalized_type.startswith("uuid"): - return "uuid" + # return "uuid" + return "string" # Enums: Enum8(...) or Enum16(...) 
if normalized_type.startswith("enum8") or normalized_type.startswith("enum16"): - return "enum" + # return "enum" + return "string" # Arrays: Array(T) if normalized_type.startswith("array"): @@ -166,8 +176,73 @@ def simplify_clickhouse_types(ch_types: list[str]) -> list[str]: def get_sub_condition(col_name: str, val_name: str, - operator: Union[schemas.SearchEventOperator, schemas.MathOperator]): + operator: Union[schemas.SearchEventOperator, schemas.MathOperator]) -> str: if operator == SearchEventOperator.PATTERN: return f"match({col_name}, %({val_name})s)" op = sh.get_sql_operator(operator) return f"{col_name} {op} %({val_name})s" + + +def get_col_cast(data_type: schemas.PropertyType, value: Any) -> str: + if value is None or len(value) == 0: + return "" + if isinstance(value, list): + value = value[0] + if data_type in (schemas.PropertyType.INT, schemas.PropertyType.FLOAT): + return best_clickhouse_type(value) + return data_type.capitalize() + + +# (type_name, minimum, maximum) – ordered by increasing size +_INT_RANGES = [ + ("Int8", -128, 127), + ("UInt8", 0, 255), + ("Int16", -32_768, 32_767), + ("UInt16", 0, 65_535), + ("Int32", -2_147_483_648, 2_147_483_647), + ("UInt32", 0, 4_294_967_295), + ("Int64", -9_223_372_036_854_775_808, 9_223_372_036_854_775_807), + ("UInt64", 0, 18_446_744_073_709_551_615), +] + + +def best_clickhouse_type(value): + """ + Return the most compact ClickHouse numeric type that can store *value* loss-lessly. + + """ + # Treat bool like tiny int + if isinstance(value, bool): + value = int(value) + + # --- Integers --- + if isinstance(value, int): + for name, lo, hi in _INT_RANGES: + if lo <= value <= hi: + return name + # Beyond UInt64: ClickHouse offers Int128 / Int256 or Decimal + return "Int128" + + # --- Decimal.Decimal (exact) --- + if isinstance(value, Decimal): + # ClickHouse Decimal32/64/128 have 9 / 18 / 38 significant digits. 
+ digits = len(value.as_tuple().digits) + if digits <= 9: + return "Decimal32" + elif digits <= 18: + return "Decimal64" + else: + return "Decimal128" + + # --- Floats --- + if isinstance(value, float): + if not math.isfinite(value): + return "Float64" # inf / nan → always Float64 + + # Check if a round-trip through 32-bit float preserves the bit pattern + packed = struct.pack("f", value) + if struct.unpack("f", packed)[0] == value: + return "Float32" + return "Float64" + + raise TypeError(f"Unsupported type: {type(value).__name__}") diff --git a/ee/api/clean-dev.sh b/ee/api/clean-dev.sh index 3162b3d4e..2e42c9d29 100755 --- a/ee/api/clean-dev.sh +++ b/ee/api/clean-dev.sh @@ -9,7 +9,7 @@ rm -rf ./build_crons.sh rm -rf ./chalicelib/core/announcements.py rm -rf ./chalicelib/core/assist.py rm -rf ./chalicelib/core/authorizers.py -rm -rf ./chalicelib/core/autocomplete/autocomplete.py +rm -rf ./chalicelib/core/autocomplete rm -rf ./chalicelib/core/collaborations/__init__.py rm -rf ./chalicelib/core/collaborations/collaboration_base.py rm -rf ./chalicelib/core/collaborations/collaboration_msteams.py diff --git a/ee/connectors/msgcodec/messages.py b/ee/connectors/msgcodec/messages.py index 9022e72b3..7aabdc46f 100644 --- a/ee/connectors/msgcodec/messages.py +++ b/ee/connectors/msgcodec/messages.py @@ -500,7 +500,7 @@ class SetNodeAttributeDict(Message): self.value = value -class ResourceTimingDeprecated(Message): +class ResourceTimingDeprecatedDeprecated(Message): __id__ = 53 def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, initiator): @@ -806,13 +806,39 @@ class WSChannel(Message): self.message_type = message_type -class Incident(Message): +class ResourceTiming(Message): __id__ = 85 - def __init__(self, label, start_time, end_time): - self.label = label + def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, initiator, transferred_size, cached, queueing, dns_lookup, 
initial_connection, ssl, content_download, total, stalled): + self.timestamp = timestamp + self.duration = duration + self.ttfb = ttfb + self.header_size = header_size + self.encoded_body_size = encoded_body_size + self.decoded_body_size = decoded_body_size + self.url = url + self.initiator = initiator + self.transferred_size = transferred_size + self.cached = cached + self.queueing = queueing + self.dns_lookup = dns_lookup + self.initial_connection = initial_connection + self.ssl = ssl + self.content_download = content_download + self.total = total + self.stalled = stalled + + +class LongAnimationTask(Message): + __id__ = 89 + + def __init__(self, name, duration, blocking_duration, first_ui_event_timestamp, start_time, scripts): + self.name = name + self.duration = duration + self.blocking_duration = blocking_duration + self.first_ui_event_timestamp = first_ui_event_timestamp self.start_time = start_time - self.end_time = end_time + self.scripts = scripts class InputChange(Message): @@ -850,7 +876,7 @@ class UnbindNodes(Message): self.total_removed_percent = total_removed_percent -class ResourceTiming(Message): +class ResourceTimingDeprecated(Message): __id__ = 116 def __init__(self, timestamp, duration, ttfb, header_size, encoded_body_size, decoded_body_size, url, initiator, transferred_size, cached): diff --git a/ee/connectors/msgcodec/messages.pyx b/ee/connectors/msgcodec/messages.pyx index dcc0ce8f2..1c9893ef8 100644 --- a/ee/connectors/msgcodec/messages.pyx +++ b/ee/connectors/msgcodec/messages.pyx @@ -743,7 +743,7 @@ cdef class SetNodeAttributeDict(PyMessage): self.value = value -cdef class ResourceTimingDeprecated(PyMessage): +cdef class ResourceTimingDeprecatedDeprecated(PyMessage): cdef public int __id__ cdef public unsigned long timestamp cdef public unsigned long duration @@ -1200,17 +1200,64 @@ cdef class WSChannel(PyMessage): self.message_type = message_type -cdef class Incident(PyMessage): +cdef class ResourceTiming(PyMessage): cdef public int __id__ 
- cdef public str label - cdef public long start_time - cdef public long end_time + cdef public unsigned long timestamp + cdef public unsigned long duration + cdef public unsigned long ttfb + cdef public unsigned long header_size + cdef public unsigned long encoded_body_size + cdef public unsigned long decoded_body_size + cdef public str url + cdef public str initiator + cdef public unsigned long transferred_size + cdef public bint cached + cdef public unsigned long queueing + cdef public unsigned long dns_lookup + cdef public unsigned long initial_connection + cdef public unsigned long ssl + cdef public unsigned long content_download + cdef public unsigned long total + cdef public unsigned long stalled - def __init__(self, str label, long start_time, long end_time): + def __init__(self, unsigned long timestamp, unsigned long duration, unsigned long ttfb, unsigned long header_size, unsigned long encoded_body_size, unsigned long decoded_body_size, str url, str initiator, unsigned long transferred_size, bint cached, unsigned long queueing, unsigned long dns_lookup, unsigned long initial_connection, unsigned long ssl, unsigned long content_download, unsigned long total, unsigned long stalled): self.__id__ = 85 - self.label = label + self.timestamp = timestamp + self.duration = duration + self.ttfb = ttfb + self.header_size = header_size + self.encoded_body_size = encoded_body_size + self.decoded_body_size = decoded_body_size + self.url = url + self.initiator = initiator + self.transferred_size = transferred_size + self.cached = cached + self.queueing = queueing + self.dns_lookup = dns_lookup + self.initial_connection = initial_connection + self.ssl = ssl + self.content_download = content_download + self.total = total + self.stalled = stalled + + +cdef class LongAnimationTask(PyMessage): + cdef public int __id__ + cdef public str name + cdef public long duration + cdef public long blocking_duration + cdef public long first_ui_event_timestamp + cdef public long 
start_time + cdef public str scripts + + def __init__(self, str name, long duration, long blocking_duration, long first_ui_event_timestamp, long start_time, str scripts): + self.__id__ = 89 + self.name = name + self.duration = duration + self.blocking_duration = blocking_duration + self.first_ui_event_timestamp = first_ui_event_timestamp self.start_time = start_time - self.end_time = end_time + self.scripts = scripts cdef class InputChange(PyMessage): @@ -1263,7 +1310,7 @@ cdef class UnbindNodes(PyMessage): self.total_removed_percent = total_removed_percent -cdef class ResourceTiming(PyMessage): +cdef class ResourceTimingDeprecated(PyMessage): cdef public int __id__ cdef public unsigned long timestamp cdef public unsigned long duration diff --git a/ee/connectors/msgcodec/msgcodec.py b/ee/connectors/msgcodec/msgcodec.py index 09f0e4c48..5db27b325 100644 --- a/ee/connectors/msgcodec/msgcodec.py +++ b/ee/connectors/msgcodec/msgcodec.py @@ -486,7 +486,7 @@ class MessageCodec(Codec): ) if message_id == 53: - return ResourceTimingDeprecated( + return ResourceTimingDeprecatedDeprecated( timestamp=self.read_uint(reader), duration=self.read_uint(reader), ttfb=self.read_uint(reader), @@ -730,10 +730,34 @@ class MessageCodec(Codec): ) if message_id == 85: - return Incident( - label=self.read_string(reader), + return ResourceTiming( + timestamp=self.read_uint(reader), + duration=self.read_uint(reader), + ttfb=self.read_uint(reader), + header_size=self.read_uint(reader), + encoded_body_size=self.read_uint(reader), + decoded_body_size=self.read_uint(reader), + url=self.read_string(reader), + initiator=self.read_string(reader), + transferred_size=self.read_uint(reader), + cached=self.read_boolean(reader), + queueing=self.read_uint(reader), + dns_lookup=self.read_uint(reader), + initial_connection=self.read_uint(reader), + ssl=self.read_uint(reader), + content_download=self.read_uint(reader), + total=self.read_uint(reader), + stalled=self.read_uint(reader) + ) + + if message_id == 
89: + return LongAnimationTask( + name=self.read_string(reader), + duration=self.read_int(reader), + blocking_duration=self.read_int(reader), + first_ui_event_timestamp=self.read_int(reader), start_time=self.read_int(reader), - end_time=self.read_int(reader) + scripts=self.read_string(reader) ) if message_id == 112: @@ -764,7 +788,7 @@ class MessageCodec(Codec): ) if message_id == 116: - return ResourceTiming( + return ResourceTimingDeprecated( timestamp=self.read_uint(reader), duration=self.read_uint(reader), ttfb=self.read_uint(reader), diff --git a/ee/connectors/msgcodec/msgcodec.pyx b/ee/connectors/msgcodec/msgcodec.pyx index 436acbef0..7b8fcb976 100644 --- a/ee/connectors/msgcodec/msgcodec.pyx +++ b/ee/connectors/msgcodec/msgcodec.pyx @@ -584,7 +584,7 @@ cdef class MessageCodec: ) if message_id == 53: - return ResourceTimingDeprecated( + return ResourceTimingDeprecatedDeprecated( timestamp=self.read_uint(reader), duration=self.read_uint(reader), ttfb=self.read_uint(reader), @@ -828,10 +828,34 @@ cdef class MessageCodec: ) if message_id == 85: - return Incident( - label=self.read_string(reader), + return ResourceTiming( + timestamp=self.read_uint(reader), + duration=self.read_uint(reader), + ttfb=self.read_uint(reader), + header_size=self.read_uint(reader), + encoded_body_size=self.read_uint(reader), + decoded_body_size=self.read_uint(reader), + url=self.read_string(reader), + initiator=self.read_string(reader), + transferred_size=self.read_uint(reader), + cached=self.read_boolean(reader), + queueing=self.read_uint(reader), + dns_lookup=self.read_uint(reader), + initial_connection=self.read_uint(reader), + ssl=self.read_uint(reader), + content_download=self.read_uint(reader), + total=self.read_uint(reader), + stalled=self.read_uint(reader) + ) + + if message_id == 89: + return LongAnimationTask( + name=self.read_string(reader), + duration=self.read_int(reader), + blocking_duration=self.read_int(reader), + first_ui_event_timestamp=self.read_int(reader), 
start_time=self.read_int(reader), - end_time=self.read_int(reader) + scripts=self.read_string(reader) ) if message_id == 112: @@ -862,7 +886,7 @@ cdef class MessageCodec: ) if message_id == 116: - return ResourceTiming( + return ResourceTimingDeprecated( timestamp=self.read_uint(reader), duration=self.read_uint(reader), ttfb=self.read_uint(reader), diff --git a/frontend/.yarnrc.yml b/frontend/.yarnrc.yml index f3bd10e60..da3ec2ca7 100644 --- a/frontend/.yarnrc.yml +++ b/frontend/.yarnrc.yml @@ -2,6 +2,6 @@ compressionLevel: 1 enableGlobalCache: true -nodeLinker: pnpm +nodeLinker: node-modules yarnPath: .yarn/releases/yarn-4.7.0.cjs diff --git a/frontend/app/PrivateRoutes.tsx b/frontend/app/PrivateRoutes.tsx index 933627087..f16e41523 100644 --- a/frontend/app/PrivateRoutes.tsx +++ b/frontend/app/PrivateRoutes.tsx @@ -10,6 +10,7 @@ import { Loader } from 'UI'; import APIClient from './api_client'; import * as routes from './routes'; import { debounceCall } from '@/utils'; +import { hasAi } from './utils/split-utils'; const components: any = { SessionPure: lazy(() => import('Components/Session/Session')), @@ -32,7 +33,8 @@ const components: any = { SpotsListPure: lazy(() => import('Components/Spots/SpotsList')), SpotPure: lazy(() => import('Components/Spots/SpotPlayer')), ScopeSetup: lazy(() => import('Components/ScopeForm')), - HighlightsPure: lazy(() => import('Components/Highlights/HighlightsList')) + HighlightsPure: lazy(() => import('Components/Highlights/HighlightsList')), + KaiPure: lazy(() => import('Components/Kai/KaiChat')), }; const enhancedComponents: any = { @@ -52,7 +54,8 @@ const enhancedComponents: any = { SpotsList: withSiteIdUpdater(components.SpotsListPure), Spot: components.SpotPure, ScopeSetup: components.ScopeSetup, - Highlights: withSiteIdUpdater(components.HighlightsPure) + Highlights: withSiteIdUpdater(components.HighlightsPure), + Kai: withSiteIdUpdater(components.KaiPure), }; const { withSiteId } = routes; @@ -97,9 +100,11 @@ const 
SPOT_PATH = routes.spot(); const SCOPE_SETUP = routes.scopeSetup(); const HIGHLIGHTS_PATH = routes.highlights(); +const KAI_PATH = routes.kai(); function PrivateRoutes() { - const { projectsStore, userStore, integrationsStore, searchStore } = useStore(); + const { projectsStore, userStore, integrationsStore, searchStore } = + useStore(); const onboarding = userStore.onboarding; const scope = userStore.scopeState; const { tenantId } = userStore.account; @@ -123,8 +128,12 @@ function PrivateRoutes() { React.useEffect(() => { if (!searchStore.urlParsed) return; - debounceCall(() => searchStore.fetchSessions(true), 250)() - }, [searchStore.urlParsed, searchStore.instance.filters, searchStore.instance.eventsOrder]); + debounceCall(() => searchStore.fetchSessions(true), 250)(); + }, [ + searchStore.urlParsed, + searchStore.instance.filters, + searchStore.instance.eventsOrder, + ]); return ( }> @@ -162,13 +171,13 @@ function PrivateRoutes() { case '/integrations/slack': client.post('integrations/slack/add', { code: location.search.split('=')[1], - state: tenantId + state: tenantId, }); break; case '/integrations/msteams': client.post('integrations/msteams/add', { code: location.search.split('=')[1], - state: tenantId + state: tenantId, }); break; } @@ -193,7 +202,7 @@ function PrivateRoutes() { withSiteId(DASHBOARD_PATH, siteIdList), withSiteId(DASHBOARD_SELECT_PATH, siteIdList), withSiteId(DASHBOARD_METRIC_CREATE_PATH, siteIdList), - withSiteId(DASHBOARD_METRIC_DETAILS_PATH, siteIdList) + withSiteId(DASHBOARD_METRIC_DETAILS_PATH, siteIdList), ]} component={enhancedComponents.Dashboard} /> @@ -254,7 +263,7 @@ function PrivateRoutes() { withSiteId(FFLAG_READ_PATH, siteIdList), withSiteId(FFLAG_CREATE_PATH, siteIdList), withSiteId(NOTES_PATH, siteIdList), - withSiteId(BOOKMARKS_PATH, siteIdList) + withSiteId(BOOKMARKS_PATH, siteIdList), ]} component={enhancedComponents.SessionsOverview} /> @@ -270,6 +279,14 @@ function PrivateRoutes() { path={withSiteId(LIVE_SESSION_PATH, 
siteIdList)} component={enhancedComponents.LiveSession} /> + {hasAi ? ( + + ) : null} {Object.entries(routes.redirects).map(([fr, to]) => ( ))} diff --git a/frontend/app/api_client.ts b/frontend/app/api_client.ts index a2dc3cb4c..899cf904d 100644 --- a/frontend/app/api_client.ts +++ b/frontend/app/api_client.ts @@ -60,7 +60,7 @@ export default class APIClient { private siteIdCheck: (() => { siteId: string | null }) | undefined; - private getJwt: () => string | null = () => null; + public getJwt: () => string | null = () => null; private onUpdateJwt: (data: { jwt?: string; spotJwt?: string }) => void; @@ -197,7 +197,7 @@ export default class APIClient { delete init.credentials; } - const noChalice = path.includes('v1/integrations') || path.includes('/spot') && !path.includes('/login'); + const noChalice = path.includes('/kai') || path.includes('v1/integrations') || path.includes('/spot') && !path.includes('/login'); let edp = window.env.API_EDP || window.location.origin + '/api'; if (noChalice && !edp.includes('api.openreplay.com')) { edp = edp.replace('/api', ''); diff --git a/frontend/app/assets/img/logo-img.png b/frontend/app/assets/img/logo-img.png new file mode 100644 index 000000000..1750a4d84 Binary files /dev/null and b/frontend/app/assets/img/logo-img.png differ diff --git a/frontend/app/components/Assist/AssistView.tsx b/frontend/app/components/Assist/AssistView.tsx index c16bd0cd7..95579face 100644 --- a/frontend/app/components/Assist/AssistView.tsx +++ b/frontend/app/components/Assist/AssistView.tsx @@ -3,11 +3,12 @@ import LiveSessionList from 'Shared/LiveSessionList'; import LiveSessionSearch from 'Shared/LiveSessionSearch'; import usePageTitle from '@/hooks/usePageTitle'; import AssistSearchActions from './AssistSearchActions'; +import { PANEL_SIZES } from 'App/constants/panelSizes' function AssistView() { usePageTitle('Co-Browse - OpenReplay'); return ( -
+
diff --git a/frontend/app/components/Assist/RecordingsList/Recordings.tsx b/frontend/app/components/Assist/RecordingsList/Recordings.tsx index 074386797..a2aeb03a6 100644 --- a/frontend/app/components/Assist/RecordingsList/Recordings.tsx +++ b/frontend/app/components/Assist/RecordingsList/Recordings.tsx @@ -7,6 +7,7 @@ import { observer } from 'mobx-react-lite'; import RecordingsList from './RecordingsList'; import RecordingsSearch from './RecordingsSearch'; import { useTranslation } from 'react-i18next'; +import { PANEL_SIZES } from 'App/constants/panelSizes' function Recordings() { const { t } = useTranslation(); @@ -24,7 +25,7 @@ function Recordings() { return (
diff --git a/frontend/app/components/Client/Client.tsx b/frontend/app/components/Client/Client.tsx index cf41335a9..e24cf9f91 100644 --- a/frontend/app/components/Client/Client.tsx +++ b/frontend/app/components/Client/Client.tsx @@ -2,6 +2,7 @@ import React from 'react'; import { withRouter } from 'react-router-dom'; import { Switch, Route, Redirect } from 'react-router'; import { CLIENT_TABS, client as clientRoute } from 'App/routes'; +import { PANEL_SIZES } from 'App/constants/panelSizes' import SessionsListingSettings from 'Components/Client/SessionsListingSettings'; import Modules from 'Components/Client/Modules'; @@ -105,7 +106,7 @@ export default class Client extends React.PureComponent { }, } = this.props; return ( -
+
{activeTab && this.renderActiveTab()}
); diff --git a/frontend/app/components/Client/SessionsListingSettings.tsx b/frontend/app/components/Client/SessionsListingSettings.tsx index 02875ed0c..8fc159911 100644 --- a/frontend/app/components/Client/SessionsListingSettings.tsx +++ b/frontend/app/components/Client/SessionsListingSettings.tsx @@ -6,6 +6,7 @@ import DefaultPlaying from 'Shared/SessionSettings/components/DefaultPlaying'; import DefaultTimezone from 'Shared/SessionSettings/components/DefaultTimezone'; import ListingVisibility from 'Shared/SessionSettings/components/ListingVisibility'; import MouseTrailSettings from 'Shared/SessionSettings/components/MouseTrailSettings'; +import VirtualModeSettings from '../shared/SessionSettings/components/VirtualMode'; import DebugLog from './DebugLog'; import { useTranslation } from 'react-i18next'; @@ -35,6 +36,7 @@ function SessionsListingSettings() {
+
diff --git a/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx b/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx index 92ec68dd2..f1c980172 100644 --- a/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/AlertsView.tsx @@ -10,6 +10,7 @@ import { useStore } from 'App/mstore'; import AlertsList from './AlertsList'; import AlertsSearch from './AlertsSearch'; import { useTranslation } from 'react-i18next'; +import { PANEL_SIZES } from 'App/constants/panelSizes' interface IAlertsView { siteId: string; @@ -30,7 +31,7 @@ function AlertsView({ siteId }: IAlertsView) { }, [history]); return (
diff --git a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx index b679bccdb..a1843c4e6 100644 --- a/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx +++ b/frontend/app/components/Dashboard/components/Alerts/NewAlert.tsx @@ -16,6 +16,7 @@ import NotifyHooks from './AlertForm/NotifyHooks'; import AlertListItem from './AlertListItem'; import Condition from './AlertForm/Condition'; import { useTranslation } from 'react-i18next'; +import { PANEL_SIZES } from 'App/constants/panelSizes' function Circle({ text }: { text: string }) { return ( @@ -200,7 +201,7 @@ function NewAlert(props: IProps) { const isThreshold = instance.detectionMethod === 'threshold'; return ( -
+
diff --git a/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx b/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx index a9c92298f..e175c8ccb 100644 --- a/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx +++ b/frontend/app/components/Dashboard/components/DashboardModal/DashboardModal.tsx @@ -8,6 +8,7 @@ import { dashboardMetricCreate, withSiteId } from 'App/routes'; import DashboardForm from '../DashboardForm'; import DashboardMetricSelection from '../DashboardMetricSelection'; import { useTranslation } from 'react-i18next'; +import { PANEL_SIZES } from 'App/constants/panelSizes' interface Props extends RouteComponentProps { history: any; @@ -57,7 +58,7 @@ function DashboardModal(props: Props) { backgroundColor: '#FAFAFA', zIndex: 999, width: '100%', - maxWidth: '1360px', + maxWidth: PANEL_SIZES.maxWidth, }} >
diff --git a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx index 4ed7bece5..85b15b17e 100644 --- a/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx +++ b/frontend/app/components/Dashboard/components/DashboardView/DashboardView.tsx @@ -14,6 +14,7 @@ import DashboardHeader from '../DashboardHeader'; import DashboardModal from '../DashboardModal'; import DashboardWidgetGrid from '../DashboardWidgetGrid'; import AiQuery from './AiQuery'; +import { PANEL_SIZES } from 'App/constants/panelSizes' interface IProps { siteId: string; @@ -103,7 +104,7 @@ function DashboardView(props: Props) { return (
{/* @ts-ignore */} diff --git a/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx b/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx index 380f205ea..a885c4e0f 100644 --- a/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx +++ b/frontend/app/components/Dashboard/components/MetricsView/MetricsView.tsx @@ -3,6 +3,7 @@ import withPageTitle from 'HOCs/withPageTitle'; import { observer } from 'mobx-react-lite'; import MetricsList from '../MetricsList'; import MetricViewHeader from '../MetricViewHeader'; +import { PANEL_SIZES } from 'App/constants/panelSizes' interface Props { siteId: string; @@ -10,7 +11,7 @@ interface Props { function MetricsView({ siteId }: Props) { return (
diff --git a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx index 5ce1d865a..6eea04830 100644 --- a/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx +++ b/frontend/app/components/Dashboard/components/WidgetSessions/WidgetSessions.tsx @@ -1,395 +1,394 @@ -import React, {useEffect, useState} from 'react'; -import {NoContent, Loader, Pagination} from 'UI'; -import {Button, Tag, Tooltip, Dropdown, message} from 'antd'; -import {UndoOutlined, DownOutlined} from '@ant-design/icons'; +import React, { useEffect, useState } from 'react'; +import { NoContent, Loader, Pagination } from 'UI'; +import { Button, Tag, Tooltip, Dropdown, message } from 'antd'; +import { UndoOutlined, DownOutlined } from '@ant-design/icons'; import cn from 'classnames'; -import {useStore} from 'App/mstore'; +import { useStore } from 'App/mstore'; import SessionItem from 'Shared/SessionItem'; -import {observer} from 'mobx-react-lite'; -import {DateTime} from 'luxon'; -import {debounce, numberWithCommas} from 'App/utils'; +import { observer } from 'mobx-react-lite'; +import { DateTime } from 'luxon'; +import { debounce, numberWithCommas } from 'App/utils'; import useIsMounted from 'App/hooks/useIsMounted'; -import AnimatedSVG, {ICONS} from 'Shared/AnimatedSVG/AnimatedSVG'; -import {HEATMAP, USER_PATH, FUNNEL} from 'App/constants/card'; -import {useTranslation} from 'react-i18next'; +import AnimatedSVG, { ICONS } from 'Shared/AnimatedSVG/AnimatedSVG'; +import { HEATMAP, USER_PATH, FUNNEL } from 'App/constants/card'; +import { useTranslation } from 'react-i18next'; interface Props { - className?: string; + className?: string; } function WidgetSessions(props: Props) { - const {t} = useTranslation(); - const listRef = React.useRef(null); - const {className = ''} = props; - const [activeSeries, setActiveSeries] = useState('all'); - const [data, setData] = 
useState([]); - const isMounted = useIsMounted(); - const [loading, setLoading] = useState(false); - // all filtering done through series now - const filteredSessions = getListSessionsBySeries(data, 'all'); - const {dashboardStore, metricStore, sessionStore, customFieldStore} = - useStore(); - const focusedSeries = metricStore.focusedSeriesName; - const filter = dashboardStore.drillDownFilter; - const widget = metricStore.instance; - const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat( - 'LLL dd, yyyy HH:mm', - ); - const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat( - 'LLL dd, yyyy HH:mm', - ); - const [seriesOptions, setSeriesOptions] = useState([ - {label: t('All'), value: 'all'}, - ]); - const hasFilters = - filter.filters.length > 0 || - filter.startTimestamp !== dashboardStore.drillDownPeriod.start || - filter.endTimestamp !== dashboardStore.drillDownPeriod.end; - const filterText = filter.filters.length > 0 ? filter.filters[0].value : ''; - const metaList = customFieldStore.list.map((i: any) => i.key); + const { t } = useTranslation(); + const listRef = React.useRef(null); + const { className = '' } = props; + const [activeSeries, setActiveSeries] = useState('all'); + const [data, setData] = useState([]); + const isMounted = useIsMounted(); + const [loading, setLoading] = useState(false); + // all filtering done through series now + const filteredSessions = getListSessionsBySeries(data, 'all'); + const { dashboardStore, metricStore, sessionStore, customFieldStore } = + useStore(); + const focusedSeries = metricStore.focusedSeriesName; + const filter = dashboardStore.drillDownFilter; + const widget = metricStore.instance; + const startTime = DateTime.fromMillis(filter.startTimestamp).toFormat( + 'LLL dd, yyyy HH:mm', + ); + const endTime = DateTime.fromMillis(filter.endTimestamp).toFormat( + 'LLL dd, yyyy HH:mm', + ); + const [seriesOptions, setSeriesOptions] = useState([ + { label: t('All'), value: 'all' }, + ]); + const 
hasFilters = + filter.filters.length > 0 || + filter.startTimestamp !== dashboardStore.drillDownPeriod.start || + filter.endTimestamp !== dashboardStore.drillDownPeriod.end; + const filterText = filter.filters.length > 0 ? filter.filters[0].value : ''; + const metaList = customFieldStore.list.map((i: any) => i.key); - const seriesDropdownItems = seriesOptions.map((option) => ({ - key: option.value, - label: ( -
setActiveSeries(option.value)}>{option.label}
- ), + const seriesDropdownItems = seriesOptions.map((option) => ({ + key: option.value, + label: ( +
setActiveSeries(option.value)}>{option.label}
+ ), + })); + + useEffect(() => { + if (!widget.series) return; + const seriesOptions = widget.series.map((item: any) => ({ + label: item.name, + value: item.seriesId ?? item.name, })); + setSeriesOptions([{ label: t('All'), value: 'all' }, ...seriesOptions]); + }, [widget.series.length]); - useEffect(() => { - if (!widget.series) return; - const seriesOptions = widget.series.map((item: any) => ({ - label: item.name, - value: item.seriesId ?? item.name, - })); - setSeriesOptions([{label: t('All'), value: 'all'}, ...seriesOptions]); - }, [widget.series.length]); + const fetchSessions = (metricId: any, filter: any) => { + if (!isMounted()) return; - const fetchSessions = (metricId: any, filter: any) => { - if (!isMounted()) return; + if (widget.metricType === FUNNEL) { + if (filter.series[0].filter.filters.length === 0) { + setLoading(false); + return setData([]); + } + } - if (widget.metricType === FUNNEL) { - if (filter.series[0].filter.filters.length === 0) { - setLoading(false); - return setData([]); - } + setLoading(true); + const filterCopy = { ...filter }; + delete filterCopy.eventsOrderSupport; + + try { + // Handle filters properly with null checks + if (filterCopy.filters && filterCopy.filters.length > 0) { + // Ensure the nested path exists before pushing + if (filterCopy.series?.[0]?.filter) { + if (!filterCopy.series[0].filter.filters) { + filterCopy.series[0].filter.filters = []; + } + filterCopy.series[0].filter.filters.push(...filterCopy.filters); } - - - setLoading(true); - const filterCopy = {...filter}; - delete filterCopy.eventsOrderSupport; - - try { - // Handle filters properly with null checks - if (filterCopy.filters && filterCopy.filters.length > 0) { - // Ensure the nested path exists before pushing - if (filterCopy.series?.[0]?.filter) { - if (!filterCopy.series[0].filter.filters) { - filterCopy.series[0].filter.filters = []; - } - filterCopy.series[0].filter.filters.push(...filterCopy.filters); - } - filterCopy.filters = []; - } - } catch 
(e) { - // do nothing + filterCopy.filters = []; + } + } catch (e) { + // do nothing + } + widget + .fetchSessions(metricId, filterCopy) + .then((res: any) => { + setData(res); + if (metricStore.drillDown) { + setTimeout(() => { + message.info(t('Sessions Refreshed!')); + listRef.current?.scrollIntoView({ behavior: 'smooth' }); + metricStore.setDrillDown(false); + }, 0); } - widget - .fetchSessions(metricId, filterCopy) - .then((res: any) => { - setData(res); - if (metricStore.drillDown) { - setTimeout(() => { - message.info(t('Sessions Refreshed!')); - listRef.current?.scrollIntoView({behavior: 'smooth'}); - metricStore.setDrillDown(false); - }, 0); - } - }) - .finally(() => { - setLoading(false); - }); - }; - const fetchClickmapSessions = (customFilters: Record) => { - sessionStore.getSessions(customFilters).then((data) => { - setData([{...data, seriesId: 1, seriesName: 'Clicks'}]); - }); - }; - const debounceRequest: any = React.useCallback( - debounce(fetchSessions, 1000), - [], - ); - const debounceClickMapSearch = React.useCallback( - debounce(fetchClickmapSessions, 1000), - [], - ); + }) + .finally(() => { + setLoading(false); + }); + }; + const fetchClickmapSessions = (customFilters: Record) => { + sessionStore.getSessions(customFilters).then((data) => { + setData([{ ...data, seriesId: 1, seriesName: 'Clicks' }]); + }); + }; + const debounceRequest: any = React.useCallback( + debounce(fetchSessions, 1000), + [], + ); + const debounceClickMapSearch = React.useCallback( + debounce(fetchClickmapSessions, 1000), + [], + ); - const depsString = JSON.stringify(widget.series); + const depsString = JSON.stringify(widget.series); - const loadData = () => { - if (widget.metricType === HEATMAP && metricStore.clickMapSearch) { - const clickFilter = { - value: [metricStore.clickMapSearch], - type: 'CLICK', - operator: 'onSelector', - isEvent: true, - // @ts-ignore - filters: [], - }; - const timeRange = { - rangeValue: dashboardStore.drillDownPeriod.rangeValue, - 
startDate: dashboardStore.drillDownPeriod.start, - endDate: dashboardStore.drillDownPeriod.end, - }; - const customFilter = { - ...filter, - ...timeRange, - filters: [...sessionStore.userFilter.filters, clickFilter], - }; - debounceClickMapSearch(customFilter); - } else { - const hasStartPoint = - !!widget.startPoint && widget.metricType === USER_PATH; - const onlyFocused = focusedSeries - ? widget.series.filter((s) => s.name === focusedSeries) - : widget.series; - const activeSeries = metricStore.disabledSeries.length - ? onlyFocused.filter( - (s) => !metricStore.disabledSeries.includes(s.name), - ) - : onlyFocused; - const seriesJson = activeSeries.map((s) => s.toJson()); - if (hasStartPoint) { - seriesJson[0].filter.filters.push(widget.startPoint.toJson()); - } - if (widget.metricType === USER_PATH) { - if ( - seriesJson[0].filter.filters[0].value[0] === '' && - widget.data.nodes?.length - ) { - seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name; - } else if ( - seriesJson[0].filter.filters[0].value[0] === '' && - !widget.data.nodes?.length - ) { - // no point requesting if we don't have starting point picked by api - return; - } - } - debounceRequest(widget.metricId, { - ...filter, - series: seriesJson, - page: metricStore.sessionsPage, - limit: metricStore.sessionsPageSize, - }); + const loadData = () => { + if (widget.metricType === HEATMAP && metricStore.clickMapSearch) { + const clickFilter = { + value: [metricStore.clickMapSearch], + type: 'CLICK', + operator: 'onSelector', + isEvent: true, + // @ts-ignore + filters: [], + }; + const timeRange = { + rangeValue: dashboardStore.drillDownPeriod.rangeValue, + startDate: dashboardStore.drillDownPeriod.start, + endDate: dashboardStore.drillDownPeriod.end, + }; + const customFilter = { + ...filter, + ...timeRange, + filters: [...sessionStore.userFilter.filters, clickFilter], + }; + debounceClickMapSearch(customFilter); + } else { + const hasStartPoint = + !!widget.startPoint && widget.metricType 
=== USER_PATH; + const onlyFocused = focusedSeries + ? widget.series.filter((s) => s.name === focusedSeries) + : widget.series; + const activeSeries = metricStore.disabledSeries.length + ? onlyFocused.filter( + (s) => !metricStore.disabledSeries.includes(s.name), + ) + : onlyFocused; + const seriesJson = activeSeries.map((s) => s.toJson()); + if (hasStartPoint) { + seriesJson[0].filter.filters.push(widget.startPoint.toJson()); + } + if (widget.metricType === USER_PATH) { + if ( + seriesJson[0].filter.filters[0].value[0] === '' && + widget.data.nodes?.length + ) { + seriesJson[0].filter.filters[0].value = widget.data.nodes[0].name; + } else if ( + seriesJson[0].filter.filters[0].value[0] === '' && + !widget.data.nodes?.length + ) { + // no point requesting if we don't have starting point picked by api + return; } - }; - useEffect(() => { - metricStore.updateKey('sessionsPage', 1); - loadData(); - }, [ - filter.startTimestamp, - filter.endTimestamp, - filter.filters, - depsString, - metricStore.clickMapSearch, - focusedSeries, - widget.startPoint, - widget.data.nodes, - metricStore.disabledSeries.length, - ]); - useEffect(loadData, [metricStore.sessionsPage]); - useEffect(() => { - if (activeSeries === 'all') { - metricStore.setFocusedSeriesName(null); - } else { - metricStore.setFocusedSeriesName( - seriesOptions.find((option) => option.value === activeSeries)?.label, - false, - ); - } - }, [activeSeries]); - useEffect(() => { - if (focusedSeries) { - setActiveSeries( - seriesOptions.find((option) => option.label === focusedSeries)?.value || - 'all', - ); - } else { - setActiveSeries('all'); - } - }, [focusedSeries]); + } + debounceRequest(widget.metricId, { + ...filter, + series: seriesJson, + page: metricStore.sessionsPage, + limit: metricStore.sessionsPageSize, + }); + } + }; + useEffect(() => { + metricStore.updateKey('sessionsPage', 1); + loadData(); + }, [ + filter.startTimestamp, + filter.endTimestamp, + filter.filters, + depsString, + 
metricStore.clickMapSearch, + focusedSeries, + widget.startPoint, + widget.data?.nodes, + metricStore.disabledSeries.length, + ]); + useEffect(loadData, [metricStore.sessionsPage]); + useEffect(() => { + if (activeSeries === 'all') { + metricStore.setFocusedSeriesName(null); + } else { + metricStore.setFocusedSeriesName( + seriesOptions.find((option) => option.value === activeSeries)?.label, + false, + ); + } + }, [activeSeries]); + useEffect(() => { + if (focusedSeries) { + setActiveSeries( + seriesOptions.find((option) => option.label === focusedSeries)?.value || + 'all', + ); + } else { + setActiveSeries('all'); + } + }, [focusedSeries]); - const clearFilters = () => { - metricStore.updateKey('sessionsPage', 1); - dashboardStore.resetDrillDownFilter(); - }; + const clearFilters = () => { + metricStore.updateKey('sessionsPage', 1); + dashboardStore.resetDrillDownFilter(); + }; - return ( -
-
-
-
-

- {metricStore.clickMapSearch ? t('Clicks') : t('Sessions')} -

-
- {metricStore.clickMapLabel - ? `on "${metricStore.clickMapLabel}" ` - : null} - {t('between')}{' '} - + return ( +
+
+
+
+

+ {metricStore.clickMapSearch ? t('Clicks') : t('Sessions')} +

+
+ {metricStore.clickMapLabel + ? `on "${metricStore.clickMapLabel}" ` + : null} + {t('between')}{' '} + {startTime} {' '} - {t('and')}{' '} - + {t('and')}{' '} + {endTime} {' '} -
- {hasFilters && ( - - - - )} -
+
+ {hasFilters && ( + + + + )} +
- {hasFilters && widget.metricType === 'table' && ( -
- - {filterText} - -
- )} -
+ {hasFilters && widget.metricType === 'table' && ( +
+ + {filterText} + +
+ )} +
-
- {widget.metricType !== 'table' && widget.metricType !== HEATMAP && ( -
+
+ {widget.metricType !== 'table' && widget.metricType !== HEATMAP && ( +
{t('Filter by Series')} - - - -
- )} -
+ + +
+ )} +
+
-
- - - -
-
- {t('No relevant sessions found for the selected time period')} -
-
- } - show={filteredSessions.sessions.length === 0} - > - {filteredSessions.sessions.map((session: any) => ( - - -
- - ))} +
+ + + +
+
+ {t('No relevant sessions found for the selected time period')} +
+
+ } + show={filteredSessions.sessions.length === 0} + > + {filteredSessions.sessions.map((session: any) => ( + + +
+ + ))} -
-
- {t('Showing')}{' '} - +
+
+ {t('Showing')}{' '} + {(metricStore.sessionsPage - 1) * - metricStore.sessionsPageSize + - 1} + metricStore.sessionsPageSize + + 1} {' '} - {t('to')}{' '} - + {t('to')}{' '} + {(metricStore.sessionsPage - 1) * - metricStore.sessionsPageSize + - filteredSessions.sessions.length} + metricStore.sessionsPageSize + + filteredSessions.sessions.length} {' '} - {t('of')}{' '} - + {t('of')}{' '} + {numberWithCommas(filteredSessions.total)} {' '} - {t('sessions.')} -
- - metricStore.updateKey('sessionsPage', page) - } - limit={metricStore.sessionsPageSize} - debounceRequest={500} - /> -
- - + {t('sessions.')} +
+ + metricStore.updateKey('sessionsPage', page) + } + limit={metricStore.sessionsPageSize} + debounceRequest={500} + />
-
- ); +
+
+
+
+ ); } const getListSessionsBySeries = (data: any, seriesId: any) => { - const arr = data.reduce( - (arr: any, element: any) => { - if (seriesId === 'all') { - const sessionIds = arr.sessions.map((i: any) => i.sessionId); - const sessions = element.sessions.filter( - (i: any) => !sessionIds.includes(i.sessionId), - ); - arr.sessions.push(...sessions); - } else if (element.seriesId === seriesId) { - const sessionIds = arr.sessions.map((i: any) => i.sessionId); - const sessions = element.sessions.filter( - (i: any) => !sessionIds.includes(i.sessionId), - ); - const duplicates = element.sessions.length - sessions.length; - arr.sessions.push(...sessions); - arr.total = element.total - duplicates; - } - return arr; - }, - {sessions: []}, - ); - arr.total = - seriesId === 'all' - ? Math.max(...data.map((i: any) => i.total)) - : data.find((i: any) => i.seriesId === seriesId).total; - return arr; + const arr = data.reduce( + (arr: any, element: any) => { + if (seriesId === 'all') { + const sessionIds = arr.sessions.map((i: any) => i.sessionId); + const sessions = element.sessions.filter( + (i: any) => !sessionIds.includes(i.sessionId), + ); + arr.sessions.push(...sessions); + } else if (element.seriesId === seriesId) { + const sessionIds = arr.sessions.map((i: any) => i.sessionId); + const sessions = element.sessions.filter( + (i: any) => !sessionIds.includes(i.sessionId), + ); + const duplicates = element.sessions.length - sessions.length; + arr.sessions.push(...sessions); + arr.total = element.total - duplicates; + } + return arr; + }, + { sessions: [] }, + ); + arr.total = + seriesId === 'all' + ? 
Math.max(...data.map((i: any) => i.total)) + : data.find((i: any) => i.seriesId === seriesId).total; + return arr; }; export default observer(WidgetSessions); diff --git a/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx b/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx index d59e4e32a..b7d03d83e 100644 --- a/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx +++ b/frontend/app/components/Dashboard/components/WidgetView/WidgetView.tsx @@ -31,6 +31,7 @@ import CardUserList from '../CardUserList/CardUserList'; import WidgetSessions from '../WidgetSessions'; import WidgetPreview from '../WidgetPreview'; import { useTranslation } from 'react-i18next'; +import { PANEL_SIZES } from 'App/constants/panelSizes'; interface Props { history: any; @@ -183,7 +184,7 @@ function WidgetView({ : 'You have unsaved changes. Are you sure you want to leave?' } /> -
+
diff --git a/frontend/app/components/FFlags/FlagView/FlagView.tsx b/frontend/app/components/FFlags/FlagView/FlagView.tsx index c12b992dd..9dfb5baf2 100644 --- a/frontend/app/components/FFlags/FlagView/FlagView.tsx +++ b/frontend/app/components/FFlags/FlagView/FlagView.tsx @@ -10,6 +10,7 @@ import Multivariant from 'Components/FFlags/NewFFlag/Multivariant'; import { toast } from 'react-toastify'; import RolloutCondition from 'Shared/ConditionSet'; import { useTranslation } from 'react-i18next'; +import { PANEL_SIZES } from "App/constants/panelSizes"; function FlagView({ siteId, fflagId }: { siteId: string; fflagId: string }) { const { t } = useTranslation(); @@ -52,7 +53,7 @@ function FlagView({ siteId, fflagId }: { siteId: string; fflagId: string }) { }; return ( -
+
; if (!current) { return ( -
+
+
diff --git a/frontend/app/components/Kai/KaiChat.tsx b/frontend/app/components/Kai/KaiChat.tsx new file mode 100644 index 000000000..904181460 --- /dev/null +++ b/frontend/app/components/Kai/KaiChat.tsx @@ -0,0 +1,201 @@ +import React from 'react'; +import { useModal } from 'App/components/Modal'; +import { MessagesSquare, Trash } from 'lucide-react'; +import ChatHeader from './components/ChatHeader'; +import { PANEL_SIZES } from 'App/constants/panelSizes'; +import ChatLog from './components/ChatLog'; +import IntroSection from './components/IntroSection'; +import { useQuery } from '@tanstack/react-query'; +import { kaiService } from 'App/services'; +import { toast } from 'react-toastify'; +import { useStore } from 'App/mstore'; +import { observer } from 'mobx-react-lite'; +import { useHistory, useLocation } from 'react-router-dom'; + +function KaiChat() { + const { userStore, projectsStore } = useStore(); + const history = useHistory(); + const [chatTitle, setTitle] = React.useState(null); + const userId = userStore.account.id; + const userLetter = userStore.account.name[0].toUpperCase(); + const { activeSiteId } = projectsStore; + const [section, setSection] = React.useState<'intro' | 'chat'>('intro'); + const [threadId, setThreadId] = React.useState(null); + const [initialMsg, setInitialMsg] = React.useState(null); + const { showModal, hideModal } = useModal(); + const location = useLocation(); + + React.useEffect(() => { + history.replace({ search: '' }); + setThreadId(null); + setSection('intro'); + setInitialMsg(null); + setTitle(null); + }, [activeSiteId, history]); + + const openChats = () => { + showModal( + { + setTitle(title); + setThreadId(threadId); + hideModal(); + }} + />, + { right: true, width: 300 }, + ); + }; + + React.useEffect(() => { + if ( + activeSiteId && + parseInt(activeSiteId, 10) !== + parseInt(location.pathname.split('/')[1], 10) + ) { + return; + } + const params = new URLSearchParams(location.search); + const threadIdFromUrl = 
params.get('threadId'); + if (threadIdFromUrl) { + setThreadId(threadIdFromUrl); + setSection('chat'); + } + }, []); + + React.useEffect(() => { + if (threadId) { + setSection('chat'); + history.replace({ search: `?threadId=${threadId}` }); + } else { + setTitle(null); + history.replace({ search: '' }); + } + }, [threadId]); + + if (!userId || !activeSiteId) return null; + + const canGoBack = section !== 'intro'; + const goBack = canGoBack + ? () => { + if (section === 'chat') { + setThreadId(null); + setSection('intro'); + } + } + : undefined; + + const onCreate = async (firstMsg?: string) => { + if (firstMsg) { + setInitialMsg(firstMsg); + } + const newThread = await kaiService.createKaiChat(activeSiteId); + if (newThread) { + setThreadId(newThread.toString()); + setSection('chat'); + } else { + toast.error("Something wen't wrong. Please try again later."); + } + }; + return ( +
+
+ +
+ {section === 'intro' ? ( + + ) : ( + + )} +
+
+
+ ); +} + +function ChatsModal({ + onSelect, + projectId, +}: { + onSelect: (threadId: string, title: string) => void; + projectId: string; +}) { + const { + data = [], + isPending, + refetch, + } = useQuery({ + queryKey: ['kai', 'chats', projectId], + queryFn: () => kaiService.getKaiChats(projectId), + staleTime: 1000 * 60, + }); + + const onDelete = async (id: string) => { + try { + await kaiService.deleteKaiChat(projectId, id); + } catch (e) { + toast.error("Something wen't wrong. Please try again later."); + } + refetch(); + }; + return ( +
+
+ + Chats +
+ {isPending ? ( +
Loading chats...
+ ) : ( +
+ {data.map((chat) => ( +
+
+
onSelect(chat.thread_id, chat.title)} + className="cursor-pointer hover:underline truncate" + > + {chat.title} +
+
+
onDelete(chat.thread_id)} + className="cursor-pointer opacity-0 group-hover:opacity-100 rounded-r h-full px-2 flex items-center group-hover:bg-active-blue" + > + +
+
+ ))} +
+ )} +
+ ); +} + +export default observer(KaiChat); diff --git a/frontend/app/components/Kai/KaiService.ts b/frontend/app/components/Kai/KaiService.ts new file mode 100644 index 000000000..045553b4e --- /dev/null +++ b/frontend/app/components/Kai/KaiService.ts @@ -0,0 +1,80 @@ +import AiService from '@/services/AiService'; + +export default class KaiService extends AiService { + getKaiChats = async ( + projectId: string, + ): Promise<{ title: string; thread_id: string }[]> => { + const r = await this.client.get(`/kai/${projectId}/chats`); + if (!r.ok) { + throw new Error('Failed to fetch chats'); + } + const data = await r.json(); + return data; + }; + + deleteKaiChat = async ( + projectId: string, + threadId: string, + ): Promise => { + const r = await this.client.delete(`/kai/${projectId}/chats/${threadId}`); + if (!r.ok) { + throw new Error('Failed to delete chat'); + } + return true; + }; + + getKaiChat = async ( + projectId: string, + threadId: string, + ): Promise< + { + role: string; + content: string; + message_id: any; + duration?: number; + feedback: boolean | null; + }[] + > => { + const r = await this.client.get(`/kai/${projectId}/chats/${threadId}`); + if (!r.ok) { + throw new Error('Failed to fetch chat'); + } + const data = await r.json(); + return data; + }; + + createKaiChat = async (projectId: string): Promise => { + const r = await this.client.get(`/kai/${projectId}/chat/new`); + if (!r.ok) { + throw new Error('Failed to create chat'); + } + const data = await r.json(); + return data; + }; + + feedback = async ( + positive: boolean | null, + messageId: string, + projectId: string, + ) => { + const r = await this.client.post(`/kai/${projectId}/messages/feedback`, { + message_id: messageId, + value: positive, + }); + if (!r.ok) { + throw new Error('Failed to send feedback'); + } + + return await r.json(); + }; + + cancelGeneration = async (projectId: string, threadId: string) => { + const r = await this.client.post(`/kai/${projectId}/cancel/${threadId}`); 
+ if (!r.ok) { + throw new Error('Failed to cancel generation'); + } + + const data = await r.json(); + return data; + }; +} diff --git a/frontend/app/components/Kai/KaiStore.ts b/frontend/app/components/Kai/KaiStore.ts new file mode 100644 index 000000000..6c84d5a62 --- /dev/null +++ b/frontend/app/components/Kai/KaiStore.ts @@ -0,0 +1,256 @@ +import { makeAutoObservable, runInAction } from 'mobx'; +import { BotChunk, ChatManager, Message } from './SocketManager'; +import { kaiService as aiService, kaiService } from 'App/services'; +import { toast } from 'react-toastify'; + +class KaiStore { + chatManager: ChatManager | null = null; + processingStage: BotChunk | null = null; + messages: Message[] = []; + queryText = ''; + loadingChat = false; + replacing = false; + + constructor() { + makeAutoObservable(this); + } + + get lastHumanMessage() { + let msg = null; + let index = null; + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if (message.isUser) { + msg = message; + index = i; + break; + } + } + return { msg, index }; + } + + get lastKaiMessage() { + let msg = null; + let index = null; + for (let i = this.messages.length - 1; i >= 0; i--) { + const message = this.messages[i]; + if (!message.isUser) { + msg = message; + index = i; + break; + } + } + return { msg, index }; + } + + setQueryText = (text: string) => { + this.queryText = text; + }; + + setLoadingChat = (loading: boolean) => { + this.loadingChat = loading; + }; + + setChatManager = (chatManager: ChatManager) => { + this.chatManager = chatManager; + }; + + setProcessingStage = (stage: BotChunk | null) => { + this.processingStage = stage; + }; + + setMessages = (messages: Message[]) => { + this.messages = messages; + }; + + addMessage = (message: Message) => { + this.messages.push(message); + }; + + editMessage = (text: string) => { + this.setQueryText(text); + this.setReplacing(true); + }; + + replaceAtIndex = (message: Message, index: number) => { + const 
messages = [...this.messages]; + messages[index] = message; + this.setMessages(messages); + }; + + deleteAtIndex = (indexes: number[]) => { + if (!indexes.length) return; + const messages = this.messages.filter((_, i) => !indexes.includes(i)); + runInAction(() => { + this.messages = messages; + }); + }; + + getChat = async (projectId: string, threadId: string) => { + this.setLoadingChat(true); + try { + const res = await aiService.getKaiChat(projectId, threadId); + if (res && res.length) { + this.setMessages( + res.map((m) => { + const isUser = m.role === 'human'; + return { + text: m.content, + isUser: isUser, + messageId: m.message_id, + duration: m.duration, + feedback: m.feedback, + }; + }), + ); + } + } catch (e) { + console.error(e); + toast.error("Couldn't load chat history. Please try again later."); + } finally { + this.setLoadingChat(false); + } + }; + + createChatManager = ( + settings: { projectId: string; threadId: string }, + setTitle: (title: string) => void, + initialMsg: string | null, + ) => { + const token = kaiService.client.getJwt(); + if (!token) { + console.error('No token found'); + return; + } + this.chatManager = new ChatManager({ ...settings, token }); + this.chatManager.setOnMsgHook({ + msgCallback: (msg) => { + if ('state' in msg) { + if (msg.state === 'running') { + this.setProcessingStage({ + content: 'Processing your request...', + stage: 'chart', + messageId: Date.now().toPrecision(), + duration: msg.start_time ? 
Date.now() - msg.start_time : 0, + }); + } else { + this.setProcessingStage(null); + } + } else { + if (msg.stage === 'start') { + this.setProcessingStage({ + ...msg, + content: 'Processing your request...', + }); + } + if (msg.stage === 'chart') { + this.setProcessingStage(msg); + } + if (msg.stage === 'final') { + const msgObj = { + text: msg.content, + isUser: false, + messageId: msg.messageId, + duration: msg.duration, + feedback: null, + }; + this.addMessage(msgObj); + this.setProcessingStage(null); + } + } + }, + titleCallback: setTitle, + }); + + if (initialMsg) { + this.sendMessage(initialMsg); + } + }; + + setReplacing = (replacing: boolean) => { + this.replacing = replacing; + }; + + sendMessage = (message: string) => { + if (this.chatManager) { + this.chatManager.sendMessage(message, this.replacing); + } + if (this.replacing) { + console.log( + this.lastHumanMessage, + this.lastKaiMessage, + 'replacing these two', + ); + const deleting = []; + if (this.lastHumanMessage.index !== null) { + deleting.push(this.lastHumanMessage.index); + } + if (this.lastKaiMessage.index !== null) { + deleting.push(this.lastKaiMessage.index); + } + this.deleteAtIndex(deleting); + this.setReplacing(false); + } + this.addMessage({ + text: message, + isUser: true, + messageId: Date.now().toString(), + feedback: null, + duration: 0, + }); + }; + + sendMsgFeedback = ( + feedback: string, + messageId: string, + projectId: string, + ) => { + this.messages = this.messages.map((msg) => { + if (msg.messageId === messageId) { + return { + ...msg, + feedback: feedback === 'like' ? true : false, + }; + } + return msg; + }); + aiService + .feedback(feedback === 'like', messageId, projectId) + .then(() => { + toast.success('Feedback saved.'); + }) + .catch((e) => { + console.error(e); + toast.error('Failed to send feedback. 
Please try again later.'); + }); + }; + + cancelGeneration = async (settings: { + projectId: string; + userId: string; + threadId: string; + }) => { + try { + await kaiService.cancelGeneration(settings.projectId, settings.threadId); + this.setProcessingStage(null); + } catch (e) { + console.error(e); + toast.error( + 'Failed to cancel the response generation, please try again later.', + ); + } + }; + + clearChat = () => { + this.setMessages([]); + this.setProcessingStage(null); + this.setLoadingChat(false); + this.setQueryText(''); + if (this.chatManager) { + this.chatManager.disconnect(); + this.chatManager = null; + } + }; +} + +export const kaiStore = new KaiStore(); diff --git a/frontend/app/components/Kai/SocketManager.ts b/frontend/app/components/Kai/SocketManager.ts new file mode 100644 index 000000000..e34ccc155 --- /dev/null +++ b/frontend/app/components/Kai/SocketManager.ts @@ -0,0 +1,120 @@ +import io from 'socket.io-client'; + +export class ChatManager { + socket: ReturnType; + threadId: string | null = null; + + constructor({ + projectId, + threadId, + token, + }: { + projectId: string; + threadId: string; + token: string; + }) { + this.threadId = threadId; + const urlObject = new URL(window.env.API_EDP || window.location.origin); + const socket = io(`${urlObject.origin}/kai/chat`, { + transports: ['websocket'], + path: '/kai/chat/socket.io', + autoConnect: true, + reconnection: true, + reconnectionAttempts: 5, + reconnectionDelay: 1000, + reconnectionDelayMax: 5000, + withCredentials: true, + multiplex: true, + query: { + project_id: projectId, + thread_id: threadId, + timezone: Intl.DateTimeFormat().resolvedOptions().timeZone, + }, + auth: { + token: `Bearer ${token}`, + }, + }); + socket.on('connect', () => { + console.log('Connected to server'); + }); + socket.on('disconnect', () => { + console.log('Disconnected from server'); + }); + socket.on('error', (err) => { + console.error('Socket error:', err); + }); + + this.socket = socket; + } + + 
reconnect = () => { + this.socket.connect(); + }; + + sendMessage = (message: string, isReplace = false) => { + if (!this.socket.connected) { + this.reconnect(); + setTimeout(() => { + this.sendMessage(message, isReplace); + }, 500); + } else { + this.socket.emit( + 'message', + JSON.stringify({ + message, + threadId: this.threadId, + replace: isReplace, + }), + ); + } + }; + + setOnMsgHook = ({ + msgCallback, + titleCallback, + }: { + msgCallback: ( + msg: BotChunk | { state: string; type: 'state'; start_time?: number }, + ) => void; + titleCallback: (title: string) => void; + }) => { + this.socket.on('chunk', (msg: BotChunk) => { + msgCallback(msg); + }); + this.socket.on('title', (msg: { content: string }) => { + titleCallback(msg.content); + }); + this.socket.on( + 'state', + (state: { message: 'idle' | 'running'; start_time: number }) => { + msgCallback({ + state: state.message, + type: 'state', + start_time: state.start_time, + }); + }, + ); + }; + + disconnect = () => { + this.socket.disconnect(); + }; +} + +export interface BotChunk { + stage: 'start' | 'chart' | 'final' | 'title'; + content: string; + messageId: string; + duration?: number; +} +export interface Message { + text: string; + isUser: boolean; + messageId: string; + duration?: number; + feedback: boolean | null; +} + +export interface SentMessage extends Message { + replace: boolean; +} diff --git a/frontend/app/components/Kai/components/ChatHeader.tsx b/frontend/app/components/Kai/components/ChatHeader.tsx new file mode 100644 index 000000000..756aa9767 --- /dev/null +++ b/frontend/app/components/Kai/components/ChatHeader.tsx @@ -0,0 +1,54 @@ +import React from 'react'; +import { Icon } from 'UI'; +import { MessagesSquare, ArrowLeft } from 'lucide-react'; + +function ChatHeader({ + openChats = () => {}, + goBack, + chatTitle, +}: { + goBack?: () => void; + openChats?: () => void; + chatTitle: string | null; +}) { + return ( +
+
+ {goBack ? ( +
+ +
Back
+
+ ) : null} +
+
+ {chatTitle ? ( +
{chatTitle}
+ ) : ( + <> + +
Kai
+ + )} +
+
+ +
Chats
+
+
+ ); +} + +export default ChatHeader; diff --git a/frontend/app/components/Kai/components/ChatInput.tsx b/frontend/app/components/Kai/components/ChatInput.tsx new file mode 100644 index 000000000..94bfeb89f --- /dev/null +++ b/frontend/app/components/Kai/components/ChatInput.tsx @@ -0,0 +1,55 @@ +import React from 'react' +import { Button, Input } from "antd"; +import { SendHorizonal, OctagonX } from "lucide-react"; +import { kaiStore } from "../KaiStore"; +import { observer } from "mobx-react-lite"; + +function ChatInput({ isLoading, onSubmit, threadId }: { isLoading?: boolean, onSubmit: (str: string) => void, threadId: string }) { + const inputRef = React.useRef(null); + const inputValue = kaiStore.queryText; + const isProcessing = kaiStore.processingStage !== null + const setInputValue = (text: string) => { + kaiStore.setQueryText(text) + } + + const submit = () => { + if (isProcessing) { + const settings = { projectId: '2325', userId: '0', threadId, }; + void kaiStore.cancelGeneration(settings) + } else { + if (inputValue.length > 0) { + onSubmit(inputValue) + setInputValue('') + } + } + } + + React.useEffect(() => { + if (inputRef.current) { + inputRef.current.focus() + } + }, [inputValue]) + + return ( + setInputValue(e.target.value)} + suffix={ + - {props.time ? ( -
- {shortDurationFromMs(props.time)} -
- ) : null}
); diff --git a/frontend/app/components/shared/DevTools/LongTaskPanel/LongTaskPanel.tsx b/frontend/app/components/shared/DevTools/LongTaskPanel/LongTaskPanel.tsx new file mode 100644 index 000000000..c021afdef --- /dev/null +++ b/frontend/app/components/shared/DevTools/LongTaskPanel/LongTaskPanel.tsx @@ -0,0 +1,265 @@ +import React from 'react'; +import { observer } from 'mobx-react-lite'; +import { useTranslation } from 'react-i18next'; +import { Input } from 'antd'; +import { VList, VListHandle } from 'virtua'; +import { PlayerContext } from 'App/components/Session/playerContext'; +import JumpButton from '../JumpButton'; +import { useRegExListFilterMemo } from '../useListFilter'; +import BottomBlock from '../BottomBlock'; +import { NoContent, Icon } from 'UI'; +import { InfoCircleOutlined } from '@ant-design/icons'; +import { Segmented, Select, Tag } from 'antd'; +import { LongAnimationTask } from './type'; +import Script from './Script'; +import TaskTimeline from './TaskTimeline'; +import { Hourglass } from 'lucide-react'; + +interface Row extends LongAnimationTask { + time: number; +} + +const TABS = { + all: 'all', + blocking: 'blocking', +}; + +const SORT_BY = { + timeAsc: 'timeAsc', + blocking: 'blockingDesc', + duration: 'durationDesc', +}; + +function LongTaskPanel() { + const { t } = useTranslation(); + const [tab, setTab] = React.useState(TABS.all); + const [sortBy, setSortBy] = React.useState(SORT_BY.timeAsc); + const _list = React.useRef(null); + const { player, store } = React.useContext(PlayerContext); + const [searchValue, setSearchValue] = React.useState(''); + + const { currentTab, tabStates } = store.get(); + const longTasks = tabStates[currentTab]?.longTaskList || []; + + const filteredList = useRegExListFilterMemo( + longTasks, + (task: LongAnimationTask) => [ + task.name, + task.scripts.map((script) => script.name).join(','), + task.scripts.map((script) => script.sourceURL).join(','), + ], + searchValue, + ); + + const onFilterChange = (e: 
React.ChangeEvent) => { + const value = e.target.value; + setSearchValue(value); + }; + + const onRowClick = (time: number) => { + player.jump(time); + }; + + const rows: Row[] = React.useMemo(() => { + let rowMap = filteredList.map((task) => ({ + ...task, + time: task.time ?? task.startTime, + })); + if (tab === 'blocking') { + rowMap = rowMap.filter((task) => task.blockingDuration > 0); + } + switch (sortBy) { + case SORT_BY.blocking: + rowMap = rowMap.sort((a, b) => b.blockingDuration - a.blockingDuration); + break; + case SORT_BY.duration: + rowMap = rowMap.sort((a, b) => b.duration - a.duration); + break; + default: + rowMap = rowMap.sort((a, b) => a.time - b.time); + } + return rowMap; + }, [filteredList.length, tab, sortBy]); + + const blockingTasks = React.useMemo(() => { + let blockingAmount = 0; + for (const task of longTasks) { + if (task.blockingDuration > 0) { + blockingAmount++; + } + } + return blockingAmount; + }, [longTasks.length]); + + return ( + + +
+ + {t('Long Tasks')} + +
+
+ + {t('Blocking')} ({blockingTasks}) +
+ ), + value: 'blocking', + }, + ]} + /> +