Remove logging for paid clients (#1563)

Sarthak Agarwal 2025-04-29 20:13:44 +05:30 committed by GitHub
parent b41ab427b5
commit 076c5543db
7 changed files with 65 additions and 15 deletions
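
In effect, the commit swaps the per-endpoint gate `if request.tier is None:` for a `request.should_log_features` flag computed once during authentication. A minimal sketch of the before/after predicate, under the semantics visible in the diffs below (helper names are illustrative, not from the commit):

def should_log_before(tier: str | None) -> bool:
    # Old gate: features were logged for any API key without a tier.
    return tier is None

def should_log_after(request) -> bool:
    # New gate: log only when AuthBearer explicitly set the flag;
    # getattr with a False default mirrors the hasattr-and-truthy check.
    return bool(getattr(request, "should_log_features", False))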

View file

@@ -1,9 +1,38 @@
-from django.db.models.functions import Now
 import sentry_sdk
 from ninja.errors import HttpError
 from ninja.security import HttpBearer

 from authapp.auth_utils import hash_api_key, instance_for_api_key
-from authapp.models import CFAPIKeys
+from authapp.models import CFAPIKeys, Subscriptions
+from django.db.models.functions import Now
+
+
+async def check_subscription_status(user_id, tier):
+    """Check if a user has a premium subscription that doesn't require feature logging.
+
+    Args:
+        user_id: The ID of the user to check
+        tier: The user's tier if already available
+
+    Returns:
+        bool: False if features should not be logged (premium user), True otherwise
+
+    """
+    # If tier is already set, no need to check subscription
+    if tier is not None:
+        return False
+    try:
+        subscription = await Subscriptions.objects.filter(user_id=user_id).afirst()
+        if subscription and subscription.plan_type.lower() in ["pro", "enterprise"]:
+            # Premium users for CF- don't log features
+            return False
+    except Exception as e:
+        print(f"Error checking subscription: {e!s}")
+        sentry_sdk.capture_exception(e)
+        # Default to not logging
+        return False
+    return True
+
+
 class AuthBearer(HttpBearer):
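
A hedged sketch of the outcomes the new function produces (the demo wrapper is illustrative; `afirst()` is Django's async `first()`, exactly as used above):

import asyncio

async def demo() -> None:
    # A tier on the API key short-circuits: premium path, never log.
    assert await check_subscription_status(user_id="u1", tier="pro") is False
    # With no tier, the subscriptions table decides: "pro"/"enterprise"
    # plans return False; a missing row or any other plan returns True.
    # A lookup error also returns False, so logging fails closed.
    print(await check_subscription_status(user_id="u2", tier=None))

# asyncio.run(demo())  # requires Django to be configured with the subscriptions table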
@@ -20,7 +49,11 @@ class AuthBearer(HttpBearer):
                    raise HttpError(403, "Invalid API key")
                request.user = api_key_instance.user_id
                request.tier = api_key_instance.tier
+               request.should_log_features = await check_subscription_status(user_id=request.user, tier=request.tier)
                return token
            print("THIS SHOULD NOT HAPPEN! More than one user found in the db with the same api key!")
            raise HttpError(403, "Invalid API key")
        except CFAPIKeys.DoesNotExist:
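
Note that the flag is attached only on the single-match success path; a duplicate or invalid key raises 403 before it is ever set, which is presumably why every call site below guards with `hasattr(request, "should_log_features")` rather than reading the attribute directly.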

View file

@@ -31,3 +31,24 @@ class CFAPIKeys(models.Model):
     class Meta:
         managed = False
         db_table = "cf_api_keys"
+
+
+class Subscriptions(models.Model):
+    id = models.CharField(primary_key=True, max_length=36)
+    user_id = models.TextField(unique=True)
+    stripe_customer_id = models.TextField(null=True, blank=True, unique=True)
+    stripe_subscription_id = models.TextField(null=True, blank=True)
+    plan_type = models.TextField()
+    optimizations_used = models.IntegerField(default=0)
+    optimizations_limit = models.IntegerField()
+    subscription_status = models.TextField()
+    current_period_start = models.DateTimeField(null=True, blank=True)
+    current_period_end = models.DateTimeField(null=True, blank=True)
+    created_at = models.DateTimeField(auto_now_add=True)
+    updated_at = models.DateTimeField(auto_now=True)
+    cancel_at_period_end = models.BooleanField(default=False)
+    cancellation_request_date = models.DateTimeField(null=True, blank=True)
+
+    class Meta:
+        managed = False
+        db_table = "subscriptions"

View file

@@ -4,9 +4,9 @@ import datetime as dt
 import logging
 from asyncio import Semaphore

-from aiservice.common_utils import validate_trace_id
 from ninja import NinjaAPI, Schema

+from aiservice.common_utils import validate_trace_id
 from log_features.models import OptimizationFeatures

 features_api = NinjaAPI(urls_namespace="log_features")
@@ -184,7 +184,7 @@ class LoggingErrorResponseSchema(Schema):
 @features_api.post("/", response={200: None, 500: LoggingErrorResponseSchema})
 async def log_features_cli(request, data: LoggingSchema):
     try:
-        if request.tier is None:
+        if hasattr(request, "should_log_features") and request.should_log_features:
             await log_features(
                 trace_id=data.trace_id,
                 user_id=request.user,
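
The identical one-line substitution recurs at every remaining `log_features` call site below (both optimize endpoints and both testgen paths), flipping the gate from "log unless the key has a tier" to "log only when the auth layer opted the request in".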

View file

@@ -197,7 +197,7 @@ async def optimize(request, data: OptimizeSchema) -> tuple[int, OptimizeResponse
             data.source_code, traced_optimizations
         )

-        if request.tier is None:
+        if hasattr(request, "should_log_features") and request.should_log_features:
             await log_features(
                 trace_id=data.trace_id,
                 user_id=request.user,

View file

@@ -35,7 +35,6 @@ if TYPE_CHECKING:
         ChatCompletionToolMessageParam,
     )

 from aiservice.models.aimodels import LLM

-
 optimize_line_profiler_api = NinjaAPI(urls_namespace="optimize-line-profiler")
@@ -205,7 +204,7 @@ async def optimize(request, data: OptimizeSchemaLP) -> tuple[int, OptimizeRespon
             data.source_code, traced_optimizations
         )

-        if request.tier is None:
+        if hasattr(request, "should_log_features") and request.should_log_features:
             await log_features(
                 trace_id=data.trace_id,
                 user_id=request.user,

View file

@@ -8,14 +8,13 @@ from pathlib import Path
 from typing import SupportsIndex

 import isort
-from ninja import NinjaAPI, Schema
-from pydantic import model_validator

 from aiservice.common_utils import parse_python_version
 from aiservice.env_specific import create_openai_client, debug_log_sensitive_data
 from aiservice.models.aimodels import EXECUTE_MODEL, EXPLAIN_MODEL, LLM, PLAN_MODEL
 from aiservice.models.functions_to_optimize import FunctionToOptimize
 from log_features.log_features import log_features
+from ninja import NinjaAPI, Schema
+from pydantic import model_validator
 from testgen.instrumentation.edit_generated_test import parse_module_to_cst, replace_definition_with_import
 from testgen.instrumentation.instrument_new_tests import instrument_test_source
@@ -373,9 +372,7 @@ from aiservice.analytics.posthog import ph
 @testgen_api.post(
     "/", response={200: TestGenResponseSchema, 400: TestGenErrorResponseSchema, 500: TestGenErrorResponseSchema}
 )
-async def testgen(
-    request, data: TestGenSchema
-) -> tuple[int, TestGenResponseSchema | TestGenErrorResponseSchema]:
+async def testgen(request, data: TestGenSchema) -> tuple[int, TestGenResponseSchema | TestGenErrorResponseSchema]:
     ph(request.user, "aiservice-testgen-called")
     if data.test_framework not in ["unittest", "pytest"]:
         return 400, TestGenErrorResponseSchema(error="Invalid test framework. We only support unittest and pytest.")
@@ -448,7 +445,7 @@ async def testgen(
         logging.exception(e)
         ph(request.user, "aiservice-testgen-test-generation-failed", properties={"error": str(e)})
         return 500, TestGenErrorResponseSchema(error="Error generating tests. Internal server error.")
-    if request.tier is None:
+    if hasattr(request, "should_log_features") and request.should_log_features:
         await log_features(
             trace_id=data.trace_id,
             user_id=request.user,

View file

@@ -460,7 +460,7 @@ async def testgen(
         logging.exception(e)
         ph(request.user, "aiservice-testgen-test-generation-failed", properties={"error": str(e)})
         return 500, TestGenErrorResponseSchema(error="Error generating tests. Internal server error.")
-    if request.tier is None:
+    if hasattr(request, "should_log_features") and request.should_log_features:
         # TODO: Update log features with perf instrumented tests
         await log_features(
             trace_id=data.trace_id,