Skip to content

Commit

Permalink
Dump pricing data of tasks
Browse files Browse the repository at this point in the history
  • Loading branch information
cryptobench committed Mar 3, 2024
1 parent ab024da commit a22338a
Show file tree
Hide file tree
Showing 4 changed files with 121 additions and 31 deletions.
46 changes: 46 additions & 0 deletions stats-backend/api2/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -865,3 +865,49 @@ def pricing_snapshot_stats_with_dates(start_date, end_date, network):
networks_data[network] = data

r.set("pricing_data_charted_v2", json.dumps(networks_data))


@app.task
def get_provider_task_data():
    """Collect per-task provider pricing data and cache it in Redis.

    For each network ("testnet", "mainnet") builds lists of pricing
    entries bucketed by timeframe ("1d", "7d", "1m", "1y", "All") and
    stores the whole structure as JSON under the Redis key
    ``provider_task_price_data``.
    """
    response = {
        "testnet": {"1d": [], "7d": [], "1m": [], "1y": [], "All": []},
        "mainnet": {"1d": [], "7d": [], "1m": [], "1y": [], "All": []},
    }

    # Compute "now" once so every timeframe cutoff shares the same
    # reference point instead of drifting by the microseconds between
    # successive datetime.now() calls.
    now = datetime.now()
    # NOTE(review): created_at is compared against these naive datetimes
    # below; if the project runs with USE_TZ=True this mixes aware and
    # naive values — confirm against the model / settings.
    timeframes = {
        "1d": now - timedelta(days=1),
        "7d": now - timedelta(days=7),
        "1m": now - timedelta(days=30),
        "1y": now - timedelta(days=365),
    }

    for network in ("testnet", "mainnet"):
        entries = (
            ProviderWithTask.objects.filter(network=network)
            .prefetch_related("instance", "offer")
            .select_related("offer__cheaper_than", "offer__overpriced_compared_to")
            .order_by("created_at")
        )

        for entry in entries:
            # Hoist the repeated attribute lookup out of the dict literal.
            properties = entry.offer.properties
            entry_data = {
                "providerName": properties.get("golem.node.id.name", ""),
                "providerId": entry.instance.node_id,
                "cores": properties.get("golem.inf.cpu.cores", 0),
                "memory": properties.get("golem.inf.mem.gib", 0),
                "disk": properties.get("golem.inf.storage.gib", 0),
                "cpuh": entry.cpu_per_hour,
                "envh": entry.env_per_hour,
                "start": entry.start_price,
                "date": entry.created_at.timestamp(),
            }

            # "All" receives every entry; the dated buckets only receive
            # entries newer than their cutoff.
            response[network]["All"].append(entry_data)
            for timeframe, cutoff in timeframes.items():
                if entry.created_at > cutoff:
                    response[network][timeframe].append(entry_data)

    r.set("provider_task_price_data", json.dumps(response))
1 change: 1 addition & 0 deletions stats-backend/api2/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
path("network/offers/cheapest/cores", views.cheapest_by_cores),
path("network/pricing/1h", views.pricing_past_hour),
path("network/pricing/historical", views.historical_pricing_data),
path("network/pricing/dump", views.task_pricing),
path("provider/wallet/<wallet>", views.node_wallet),
path("provider/node/<yagna_id>", views.node),
path("provider/uptime/<yagna_id>", views.node_uptime),
Expand Down
98 changes: 67 additions & 31 deletions stats-backend/api2/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,19 @@ async def pricing_past_hour(request):
return JsonResponse({"error": str(e)}, status=500)


async def task_pricing(request):
    """Return the cached provider task pricing dump from Redis as JSON.

    The payload is produced by the ``get_provider_task_data`` Celery task
    and stored under the Redis key ``provider_task_price_data``. Responds
    404 if the task has not populated the cache yet, 500 on any other
    failure.
    """
    try:
        pool = aioredis.ConnectionPool.from_url(
            "redis://redis:6379/0", decode_responses=True
        )
        r = aioredis.Redis(connection_pool=pool)
        try:
            raw = await r.get("provider_task_price_data")
            if raw is None:
                # The periodic task has not written the key yet; report it
                # clearly instead of letting json.loads(None) raise a 500.
                return JsonResponse(
                    {"error": "pricing data not available yet"}, status=404
                )
            return JsonResponse(json.loads(raw))
        finally:
            # disconnect() is a coroutine in aioredis 2.x — it must be
            # awaited, and running it in ``finally`` releases the pool's
            # connections on the error path as well.
            await pool.disconnect()
    except Exception as e:
        return JsonResponse({"error": str(e)}, status=500)


async def list_ec2_instances_comparison(request):
if request.method == "GET":
pool = aioredis.ConnectionPool.from_url(
Expand Down Expand Up @@ -92,12 +105,15 @@ async def historical_pricing_data(request):
def node_uptime(request, yagna_id):
node = Node.objects.filter(node_id=yagna_id).first()
if not node:
return JsonResponse({
"first_seen": None,
"data": [],
"downtime_periods": [],
"status": "offline",
}, status=404)
return JsonResponse(
{
"first_seen": None,
"data": [],
"downtime_periods": [],
"status": "offline",
},
status=404,
)

statuses = NodeStatusHistory.objects.filter(provider=node).order_by("timestamp")
response_data, downtime_periods = [], []
Expand All @@ -111,51 +127,74 @@ def node_uptime(request, yagna_id):
day = first_seen_date + timedelta(days=day_offset)
day_start = timezone.make_aware(datetime.combine(day, datetime.min.time()))
day_end = day_start + timedelta(days=1)
data_points_for_day = statuses.filter(timestamp__range=(day_start, day_end)).distinct('timestamp')

data_points_for_day = statuses.filter(
timestamp__range=(day_start, day_end)
).distinct("timestamp")

if data_points_for_day.exists():
for point in data_points_for_day:
if not point.is_online:
if last_offline_timestamp is None:
last_offline_timestamp = point.timestamp
else:
if last_offline_timestamp is not None:
downtime_periods.append(process_downtime(last_offline_timestamp, point.timestamp))
downtime_periods.append(
process_downtime(last_offline_timestamp, point.timestamp)
)
last_offline_timestamp = None

response_data.append({
"tooltip": "Today" if day == today_date else f"{total_days - day_offset - 1} day{'s' if (total_days - day_offset - 1) > 1 else ''} ago",
"status": "online" if point.is_online else "offline",
})
response_data.append(
{
"tooltip": (
"Today"
if day == today_date
else f"{total_days - day_offset - 1} day{'s' if (total_days - day_offset - 1) > 1 else ''} ago"
),
"status": "online" if point.is_online else "offline",
}
)
else:
# Assume the status did not change this day, infer from last known status if available
last_known_status = statuses.filter(timestamp__lt=day_start).last()
inferred_status = last_known_status.is_online if last_known_status else False # default to offline if unknown
tooltip = "Today" if day == today_date else f"{total_days - day_offset - 1} day{'s' if (total_days - day_offset - 1) > 1 else ''} ago"
response_data.append({
"tooltip": tooltip,
"status": "online" if inferred_status else "offline",
})
inferred_status = (
last_known_status.is_online if last_known_status else False
) # default to offline if unknown
tooltip = (
"Today"
if day == today_date
else f"{total_days - day_offset - 1} day{'s' if (total_days - day_offset - 1) > 1 else ''} ago"
)
response_data.append(
{
"tooltip": tooltip,
"status": "online" if inferred_status else "offline",
}
)

# Handling ongoing downtime
if last_offline_timestamp is not None:
downtime_periods.append(process_downtime(last_offline_timestamp, current_time))

return JsonResponse({
"first_seen": node.uptime_created_at.strftime("%Y-%m-%d %H:%M:%S"),
"uptime_percentage": calculate_uptime_percentage(yagna_id, node),
"data": response_data,
"downtime_periods": downtime_periods,
"current_status": "online" if node.online else "offline",
})
return JsonResponse(
{
"first_seen": node.uptime_created_at.strftime("%Y-%m-%d %H:%M:%S"),
"uptime_percentage": calculate_uptime_percentage(yagna_id, node),
"data": response_data,
"downtime_periods": downtime_periods,
"current_status": "online" if node.online else "offline",
}
)


def process_downtime(start_time, end_time):
duration = (end_time - start_time).total_seconds()
days, remainder = divmod(duration, 86400)
hours, remainder = divmod(remainder, 3600)
minutes = remainder // 60
down_timestamp = f"From {start_time.strftime('%I:%M %p')} on {start_time.strftime('%B %d, %Y')} " \
f"to {end_time.strftime('%I:%M %p')} on {end_time.strftime('%B %d, %Y')}"
down_timestamp = (
f"From {start_time.strftime('%I:%M %p')} on {start_time.strftime('%B %d, %Y')} "
f"to {end_time.strftime('%I:%M %p')} on {end_time.strftime('%B %d, %Y')}"
)

parts = []
if days:
Expand All @@ -180,9 +219,6 @@ def calculate_time_diff(check_time, granularity, node):
return f"{minutes_ago} minutes ago" if minutes_ago > 1 else "1 minute ago"





def globe_data(request):
# open json file and return data
with open("/globe_data.geojson") as json_file:
Expand Down
7 changes: 7 additions & 0 deletions stats-backend/core/celery.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ def setup_periodic_tasks(sender, **kwargs):
median_and_average_pricing_past_hour,
chart_pricing_data_for_frontend,
v2_network_online_to_redis_new_stats_page,
get_provider_task_data,
)

# sender.add_periodic_task(
Expand All @@ -85,6 +86,12 @@ def setup_periodic_tasks(sender, **kwargs):
queue="default",
options={"queue": "default", "routing_key": "default"},
)
sender.add_periodic_task(
crontab(minute="*/11"),
get_provider_task_data.s(),
queue="default",
options={"queue": "default", "routing_key": "default"},
)
sender.add_periodic_task(
crontab(minute="*/10"),
chart_pricing_data_for_frontend.s(),
Expand Down

0 comments on commit a22338a

Please sign in to comment.