GPU scores to filter endpoint + stats
cryptobench committed Jun 17, 2024
1 parent be28bea commit 6fad113
Showing 2 changed files with 36 additions and 1 deletion.
@@ -70,7 +70,7 @@ def list_provider_scores(request, network: str = Query('polygon', description="T
return JsonResponse({"error": "Data not available"}, status=503)


from api.models import Provider, CpuBenchmark, NodeStatusHistory, TaskCompletion, BlacklistedProvider, BlacklistedOperator, MemoryBenchmark, DiskBenchmark, NetworkBenchmark, PingResult
from api.models import Provider, CpuBenchmark, NodeStatusHistory, TaskCompletion, BlacklistedProvider, BlacklistedOperator, MemoryBenchmark, DiskBenchmark, NetworkBenchmark, PingResult, GPUTask
from api.scoring import calculate_uptime, penalty_weight
from django.db.models import Subquery, OuterRef
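
Note: the GPUTask model itself is not part of this diff. A minimal sketch of just the fields the new code relies on (provider, gpu_burn_gflops, created_at), assuming a plain ForeignKey to Provider; the real model in api/models.py may carry more fields:

from django.db import models

class GPUTask(models.Model):
    # Hypothetical sketch, inferred from how GPUTask is queried in this commit;
    # not the actual definition in api/models.py.
    provider = models.ForeignKey('api.Provider', on_delete=models.CASCADE)
    gpu_burn_gflops = models.FloatField()                  # GPU burn benchmark score used for filtering
    created_at = models.DateTimeField(auto_now_add=True)   # used to pick the latest run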

@@ -96,6 +96,8 @@ def filter_providers(
minProviderAge: Optional[int] = Query(None, description="Minimum number of days since provider creation. This filter helps exclude newer providers that may show a high uptime percentage simply because they've been operational for a short period, such as a provider with 99% uptime over two days, which may not be indicative of long-term reliability for requestors looking to run long-running services."),
minUptime: Optional[float] = Query(None, description="Minimum uptime percentage"),
maxUptime: Optional[float] = Query(None, description="Maximum uptime percentage"),
minGPUScore: Optional[float] = Query(None, description="Minimum GPU benchmark score"),
maxGPUScore: Optional[float] = Query(None, description="Maximum GPU benchmark score"),
minCpuMultiThreadScore: Optional[float] = Query(None, description="Minimum CPU multi-thread benchmark score"),
maxCpuMultiThreadScore: Optional[float] = Query(None, description="Maximum CPU multi-thread benchmark score"),
minCpuSingleThreadScore: Optional[float] = Query(None, description="Minimum CPU single-thread benchmark score"),
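
Note: a usage sketch for the two new parameters. Only the parameter names and the {"provider_ids": [...]} response shape come from this diff; the base URL, filter path, and the values are placeholders.

import requests

# Placeholder host and path; only the query parameter names come from the diff.
params = {"minUptime": 95, "minGPUScore": 5000, "maxGPUScore": 20000}
resp = requests.get("https://stats.example.org/filter", params=params)
resp.raise_for_status()
provider_ids = resp.json()["provider_ids"]   # list of node_ids that pass the filters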
@@ -420,6 +422,20 @@ def filter_providers(
eligible_providers = eligible_providers.exclude(
Q(pingresult__is_p2p=True) & Q(pingresult__from_non_p2p_pinger=True)
)

if minGPUScore is not None:
eligible_providers = eligible_providers.annotate(latest_gpu_score=Subquery(
GPUTask.objects.filter(
provider=OuterRef('pk')
).order_by('-created_at').values('gpu_burn_gflops')[:1]
)).filter(latest_gpu_score__gte=minGPUScore)

if maxGPUScore is not None:
eligible_providers = eligible_providers.annotate(latest_gpu_score=Subquery(
GPUTask.objects.filter(
provider=OuterRef('pk')
).order_by('-created_at').values('gpu_burn_gflops')[:1]
)).filter(latest_gpu_score__lte=maxGPUScore)

provider_ids = eligible_providers.values_list('node_id', flat=True)
return {"provider_ids": list(provider_ids)}
@@ -378,4 +378,23 @@ def online_provider_summary(request):
})

return JsonResponse(result, safe=False)
from api.models import GPUTask
@api.get("/benchmark/gpu/{node_id}")
def get_gpu_benchmark(request, node_id: str):
provider = Provider.objects.filter(node_id=node_id).first()
if not provider:
return JsonResponse({"detail": "Provider not found"}, status=404)

benchmarks = GPUTask.objects.filter(provider=provider).values(
'gpu_burn_gflops', 'created_at'
)

scores = [benchmark['gpu_burn_gflops'] for benchmark in benchmarks]

result = {
"data": [{"score": benchmark['gpu_burn_gflops'], "timestamp": benchmark['created_at'].timestamp()} for benchmark in benchmarks],
"deviation": calculate_deviation(scores),
"summary": get_summary(calculate_deviation(scores))
}

return JsonResponse(result)
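
Note: a usage sketch for the new endpoint. The /benchmark/gpu/{node_id} path comes from the decorator above; the host and node_id are placeholders, and "deviation"/"summary" carry whatever calculate_deviation() and get_summary() return.

import requests

# Placeholder host and node_id; the path segment matches @api.get("/benchmark/gpu/{node_id}").
resp = requests.get("https://stats.example.org/benchmark/gpu/0xplaceholder")
if resp.status_code == 404:
    print("provider not found")          # mirrors the 404 branch above
else:
    payload = resp.json()
    for point in payload["data"]:        # [{"score": ..., "timestamp": ...}, ...]
        print(point["timestamp"], point["score"])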
