Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
4957701
feat: implementing-environment-metrics-endpoint
Zaimwa9 May 6, 2025
0d765d2
feat: gathering-segments-and-features-metrics-from-subqueries
Zaimwa9 May 6, 2025
8e66263
feat: implemented-queries-to-fetch-env-metrics
Zaimwa9 May 6, 2025
50c54f8
feat: handled-change-requests-conditionally
Zaimwa9 May 6, 2025
9cded03
feat: implemented-types-and-tests-on-per-entity-object-model
Zaimwa9 May 7, 2025
f4e865d
feat: redesigned-payload-as-flat-list-of-metrics
Zaimwa9 May 7, 2025
afbd158
feat: improved-typing
Zaimwa9 May 7, 2025
7c6627a
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 7, 2025
7830c40
feat: removed-unused-env-metrics-entities
Zaimwa9 May 7, 2025
7955347
Merge branch 'feat/report-environment-metrics' of github.com:Flagsmit…
Zaimwa9 May 7, 2025
74ce33a
feat: added-edge-identity-overrides-metric
Zaimwa9 May 8, 2025
f219849
feat: updated-metrics-tests
Zaimwa9 May 8, 2025
559cee7
feat: added-dynamodb-wrapper-test
Zaimwa9 May 8, 2025
1837fe6
Merge branch 'main' of github.com:Flagsmith/flagsmith into feat/repor…
Zaimwa9 May 12, 2025
4b28de5
feat: excluded-open-change-requests-from-features-ids
Zaimwa9 May 13, 2025
b83a2d9
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 13, 2025
fab7a6f
feat: fixed-types
Zaimwa9 May 13, 2025
9a2c453
Merge branch 'feat/report-environment-metrics' of github.com:Flagsmit…
Zaimwa9 May 13, 2025
d910a6c
feat: fixed-segment-grouping-query
Zaimwa9 May 13, 2025
3ab02da
feat: improved-identity-sql-metrics
Zaimwa9 May 13, 2025
b941f9c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 13, 2025
68639e1
feat: reverted-to-feature-segment-ids-isolation
Zaimwa9 May 13, 2025
831a7e2
feat: pass-with-workflows-to-segments
Zaimwa9 May 14, 2025
5ca85ff
feat: reviewed-wording-titles
Zaimwa9 May 14, 2025
e63250d
Update api/environments/models.py
Zaimwa9 May 15, 2025
412e9fa
feat: use-existing-get-live-features-states
Zaimwa9 May 16, 2025
2b9a54c
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] May 16, 2025
05f13a5
feat: refactored-features-aggregation-result
Zaimwa9 May 19, 2025
bd33c76
feat: simplified-and-removed-base-metrics-views
Zaimwa9 May 28, 2025
5bebf9d
feat: limited-metrics-to-view-environment-permissions
Zaimwa9 May 28, 2025
361b179
feat: fixed-conflicts
Zaimwa9 May 29, 2025
dfdd06d
feat: moved-logic-to-metrics-service
Zaimwa9 May 30, 2025
50287e1
feat: reworked-models-functions-using-get-live-features
Zaimwa9 May 30, 2025
79346f8
feat: added-documentation-and-removed-dead-code-path
Zaimwa9 Jun 2, 2025
852ca8f
feat: removed-redundant-code
Zaimwa9 Jun 6, 2025
367eafa
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Jun 6, 2025
24fdc04
feat: added-extra-fake-versions-of-feature-states-in-test
Zaimwa9 Jun 6, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions api/app/settings/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@
"projects.tags",
"api_keys",
"webhooks",
"metrics",
"onboarding",
# 2FA
"custom_auth.mfa.trench",
Expand Down
9 changes: 9 additions & 0 deletions api/environments/dynamodb/wrappers/identity_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,3 +193,12 @@ def get_segment_ids(
return [segment.id for segment in segments]

return []

def get_identity_overrides_count(self, environment_api_key: str) -> int:
    """Return the total number of identity feature overrides in an environment.

    Pages through every identity that has at least one override
    (``overrides_only=True``) and sums the size of each identity's
    ``identity_features`` collection.
    """
    total = 0
    for item in self.iter_all_items_paginated(
        environment_api_key=environment_api_key,
        overrides_only=True,
    ):
        total += len(item["identity_features"])
    return total
82 changes: 81 additions & 1 deletion api/environments/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,13 @@
import typing
import uuid
from copy import deepcopy
from typing import TYPE_CHECKING, Literal

from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.core.cache import caches
from django.db import models
from django.db.models import Prefetch, Q
from django.db.models import Max, Prefetch, Q, QuerySet
from django.utils import timezone
from django_lifecycle import ( # type: ignore[import-untyped]
AFTER_CREATE,
Expand Down Expand Up @@ -57,6 +58,10 @@
)
from webhooks.models import AbstractBaseExportableWebhookModel

if TYPE_CHECKING:
from features.workflows.core.models import ChangeRequest


logger = logging.getLogger(__name__)

environment_cache = caches[settings.ENVIRONMENT_CACHE_NAME]
Expand Down Expand Up @@ -196,6 +201,10 @@ def __str__(self): # type: ignore[no-untyped-def]
def natural_key(self): # type: ignore[no-untyped-def]
return (self.api_key,)

@property
def is_workflow_enabled(self) -> bool:
    """Whether change-request workflows are enabled for this environment.

    Workflows count as enabled as soon as a minimum number of change-request
    approvals has been configured — any value, including ``0``, enables them;
    only ``None`` (unset) disables them.
    """
    required_approvals = self.minimum_change_request_approvals
    return required_approvals is not None

def clone(
self,
name: str,
Expand Down Expand Up @@ -358,6 +367,77 @@ def get_feature_state(
)
)

def get_identity_overrides_queryset(self) -> QuerySet[FeatureState]:
    """Return the live identity-override feature states for this environment.

    An identity override is a feature state attached to a specific identity
    and not to a feature segment; grouping by ``identity_id`` keeps one live
    row per (feature, identity) pair.
    """
    override_ids = self._get_active_feature_states_ids(
        "identity_id",
        {"identity__isnull": False, "feature_segment__isnull": True},
    )
    qs: QuerySet[FeatureState] = FeatureState.objects.filter(id__in=override_ids)
    return qs

def _get_active_feature_states_ids(
    self,
    extra_group_by_fields: Literal["identity_id"] | None = None,
    filter_kwargs: dict[str, typing.Any] | None = None,
) -> list[int]:
    """Return the ids of the newest live feature state per group.

    Starts from the environment's live feature states (via
    ``get_live_feature_states``), applies any extra ``filter_kwargs``, then
    groups by ``feature_id`` — plus ``identity_id`` when requested — and keeps
    the highest ``id`` in each group.

    :param extra_group_by_fields: optional extra grouping field; only
        ``"identity_id"`` is supported (used for identity overrides).
    :param filter_kwargs: extra keyword filters forwarded to
        ``get_live_feature_states`` (e.g. ``identity__isnull``).
    :return: list of ``FeatureState`` primary keys, one per group.
    """
    base_qs = FeatureState.objects.get_live_feature_states(
        environment=self,
        **(filter_kwargs or {}),
    )

    # Always group per feature; optionally also per identity so each
    # (feature, identity) pair yields its own latest state.
    group_fields = ["feature_id"]
    if extra_group_by_fields is not None:
        group_fields.append(extra_group_by_fields)

    # Max("id") picks the most recent row within each group; the final
    # values_list flattens the annotated ids into a plain list.
    return list(
        base_qs.values(*group_fields)
        .annotate(id=Max("id"))
        .values_list("id", flat=True)
    )

def get_features_metrics_queryset(self) -> QuerySet[FeatureState]:
    """Return the live environment-default feature states for metrics.

    Identity overrides and segment overrides are excluded, leaving only the
    environment-level (default) state of each feature.
    """
    default_state_ids = self._get_active_feature_states_ids(
        None,
        {"identity__isnull": True, "feature_segment__isnull": True},
    )
    qs: QuerySet[FeatureState] = FeatureState.objects.filter(
        id__in=default_state_ids
    )
    return qs

def _get_latest_segment_state_ids_subquery(self) -> list[int]:
    """Return ids of the live segment-override feature states.

    Restricts the environment's live feature states to rows attached to a
    feature segment and not to an identity.
    """
    segment_override_filters = Q(
        identity_id__isnull=True,
        feature_segment_id__isnull=False,
    )
    id_qs = FeatureState.objects.get_live_feature_states(
        environment=self,
        additional_filters=segment_override_filters,
    ).values_list("id", flat=True)

    return list(id_qs)

def get_segment_metrics_queryset(self) -> QuerySet[FeatureState]:
    """Return the live segment-override feature states for this environment."""
    segment_state_ids = self._get_latest_segment_state_ids_subquery()
    qs: QuerySet[FeatureState] = FeatureState.objects.filter(
        id__in=segment_state_ids
    )
    return qs

def get_change_requests_metrics_queryset(self) -> QuerySet["ChangeRequest"]:
from features.workflows.core.models import ChangeRequest

result: QuerySet["ChangeRequest"] = ChangeRequest.objects.filter(
environment=self,
committed_at__isnull=True,
deleted_at__isnull=True,
)
return result

def get_scheduled_metrics_queryset(self) -> QuerySet[FeatureState]:
    """Return environment-default feature states scheduled to go live later.

    Includes only feature states that are neither identity nor segment
    overrides and whose ``live_from`` lies strictly in the future.
    """
    scheduled: QuerySet[FeatureState] = FeatureState.objects.filter(
        environment=self,
        identity_id__isnull=True,
        feature_segment__isnull=True,
        live_from__gt=timezone.now(),
    )
    return scheduled

@staticmethod
def is_bad_key(environment_key: str) -> bool:
return (
Expand Down
2 changes: 1 addition & 1 deletion api/environments/permissions/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def perform_update(self, serializer): # type: ignore[no-untyped-def]

class UserPermissionGroupEnvironmentPermissionsViewSet(viewsets.ModelViewSet): # type: ignore[type-arg]
pagination_class = None
permission_classes = [IsAuthenticated, NestedEnvironmentPermissions]
permission_classes = [IsAuthenticated]

def get_queryset(self): # type: ignore[no-untyped-def]
if getattr(self, "swagger_fake_view", False):
Expand Down
10 changes: 9 additions & 1 deletion api/environments/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,12 @@
UserEnvironmentPermissionsViewSet,
UserPermissionGroupEnvironmentPermissionsViewSet,
)
from .views import EnvironmentAPIKeyViewSet, EnvironmentViewSet, WebhookViewSet
from .views import (
EnvironmentAPIKeyViewSet,
EnvironmentMetricsViewSet,
EnvironmentViewSet,
WebhookViewSet,
)

router = routers.DefaultRouter()
router.register(r"", EnvironmentViewSet, basename="environment")
Expand All @@ -46,6 +51,9 @@
environments_router.register(
r"webhooks", WebhookViewSet, basename="environment-webhooks"
)
environments_router.register(
r"metrics", EnvironmentMetricsViewSet, basename="environment-metrics"
)
environments_router.register(
r"featurestates",
EnvironmentFeatureStateViewSet,
Expand Down
24 changes: 23 additions & 1 deletion api/environments/views.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import logging
from typing import Generic, Type, TypeVar
from typing import Any, Generic, Type, TypeVar

from common.environments.permissions import (
TAG_SUPPORTED_PERMISSIONS,
Expand All @@ -17,6 +17,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.serializers import BaseSerializer
from rest_framework.viewsets import GenericViewSet

from core.models import AbstractBaseExportableModel
from environments.permissions.permissions import (
Expand All @@ -30,6 +31,8 @@
disable_v2_versioning,
enable_v2_versioning,
)
from metrics.metrics_service import EnvironmentMetricsService
from metrics.serializers import EnvironmentMetricsSerializer
from permissions.permissions_calculator import get_environment_permission_data
from permissions.serializers import (
PermissionModelSerializer,
Expand Down Expand Up @@ -358,3 +361,22 @@ class EnvironmentAPIKeyViewSet(
pagination_class = None
permission_classes = [IsAuthenticated, EnvironmentAdminPermission]
model_class: Type[EnvironmentAPIKey] = EnvironmentAPIKey


class EnvironmentMetricsViewSet(GenericViewSet[Environment]):
    """Read-only endpoint exposing aggregated metrics for one environment.

    The environment is resolved from the nested-router URL kwarg
    ``environment_api_key`` (matched against ``Environment.api_key``).
    """

    permission_classes = [IsAuthenticated, EnvironmentPermissions]
    lookup_field = "api_key"
    lookup_url_kwarg = "environment_api_key"
    serializer_class: type[BaseSerializer[Any]] = EnvironmentMetricsSerializer
    queryset = Environment.objects.all()

    @swagger_auto_schema(  # type: ignore[misc]
        operation_description="Get metrics for this environment.",
        responses={200: openapi.Response("Metrics", EnvironmentMetricsSerializer)},
    )
    def list(self, request: Request, *args: Any, **kwargs: Any) -> Response:
        """Build and serialize the metrics payload for the resolved environment."""
        environment: Environment = self.get_object()
        payload = EnvironmentMetricsService(environment).get_metrics_payload()
        serializer = self.get_serializer({"metrics": payload})
        return Response(serializer.data, status=status.HTTP_200_OK)
Empty file added api/metrics/__init__.py
Empty file.
5 changes: 5 additions & 0 deletions api/metrics/apps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
from django.apps import AppConfig


class MetricsAppConfig(AppConfig):
    """Django app configuration for the ``metrics`` app."""

    # App label registered in INSTALLED_APPS (see api/app/settings/common.py).
    name = "metrics"
56 changes: 56 additions & 0 deletions api/metrics/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
from metrics.types import EnvMetricsEntities, EnvMetricsName, MetricDefinition

# Static metric definitions for the environment metrics endpoint.
# Each MetricDefinition carries a machine name, a human-readable description,
# the entity it measures, and a rank used for display ordering.

TOTAL_FEATURES: MetricDefinition = {
    "name": EnvMetricsName.TOTAL_FEATURES,
    "description": "Total features",
    "entity": EnvMetricsEntities.FEATURES,
    "rank": 1,
}

ENABLED_FEATURES: MetricDefinition = {
    "name": EnvMetricsName.ENABLED_FEATURES,
    "description": "Features enabled",
    "entity": EnvMetricsEntities.FEATURES,
    "rank": 2,
}

SEGMENT_OVERRIDES: MetricDefinition = {
    "name": EnvMetricsName.SEGMENT_OVERRIDES,
    "description": "Segment overrides",
    "entity": EnvMetricsEntities.SEGMENTS,
    "rank": 3,
}

IDENTITY_OVERRIDES: MetricDefinition = {
    "name": EnvMetricsName.IDENTITY_OVERRIDES,
    "description": "Identity overrides",
    "entity": EnvMetricsEntities.IDENTITIES,
    "rank": 4,
}

OPEN_CHANGE_REQUESTS: MetricDefinition = {
    "name": EnvMetricsName.OPEN_CHANGE_REQUESTS,
    "description": "Open change requests",
    "entity": EnvMetricsEntities.WORKFLOWS,
    "rank": 5,
}

TOTAL_SCHEDULED_CHANGES: MetricDefinition = {
    "name": EnvMetricsName.TOTAL_SCHEDULED_CHANGES,
    "description": "Scheduled changes",
    "entity": EnvMetricsEntities.WORKFLOWS,
    "rank": 6,
}


# Metrics reported for every environment.
DEFAULT_METRIC_DEFINITIONS: list[MetricDefinition] = [
    TOTAL_FEATURES,
    ENABLED_FEATURES,
    SEGMENT_OVERRIDES,
    IDENTITY_OVERRIDES,
]

# Extra metrics reported only when change-request workflows are in use.
WORKFLOW_METRIC_DEFINITIONS: list[MetricDefinition] = [
    OPEN_CHANGE_REQUESTS,
    TOTAL_SCHEDULED_CHANGES,
]
Loading
Loading