2 changes: 1 addition & 1 deletion folding/__init__.py
@@ -1,4 +1,4 @@
__version__ = "3.0.3"
__version__ = "3.0.4"
version_split = __version__.split(".")
__spec_version__ = (
(10000 * int(version_split[0]))
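The `__spec_version__` expression is cut off by the diff viewer. For reference, a minimal sketch of the conventional weighting this pattern implies — the minor and patch terms below are an assumption, not lines from this diff:

    # Sketch only: the 100/1 weights for minor/patch are assumed.
    __version__ = "3.0.4"
    version_split = __version__.split(".")
    __spec_version__ = (
        (10000 * int(version_split[0]))
        + (100 * int(version_split[1]))
        + (1 * int(version_split[2]))
    )
    # "3.0.4" -> 30004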
10 changes: 4 additions & 6 deletions folding/store.py
@@ -5,12 +5,10 @@
import sqlite3
import requests
from queue import Queue
-from typing import Dict, List
+from typing import List
 from dotenv import load_dotenv
-
+from datetime import timezone
 from datetime import datetime
-
-import numpy as np
import pandas as pd

from atom.epistula.epistula import Epistula
@@ -79,7 +77,7 @@ def get_queue(self, validator_hotkey: str, ready=True) -> Queue:

         if ready:
             # Calculate the threshold time for ready jobs
-            now = datetime.utcnow().isoformat()
+            now = datetime.now(timezone.utc).isoformat()
             query = f"""
                 SELECT * FROM {self.table_name}
                 WHERE active = 1
@@ -359,7 +357,7 @@ async def update(self, loss: float, hotkey: str):
             self.best_loss = loss
             self.best_loss_at = pd.Timestamp.now().floor("s")
             self.best_hotkey = hotkey
-        self.updated_at = datetime.now()
+        self.updated_at = datetime.now(timezone.utc)


 class MockJob(Job):
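The recurring change in this file (and in neurons/validator.py below) is the move from naive to timezone-aware timestamps. A quick illustration of the difference, not taken from the PR:

    from datetime import datetime, timezone

    naive = datetime.utcnow()           # naive: tzinfo is None; deprecated since Python 3.12
    aware = datetime.now(timezone.utc)  # aware: tzinfo is timezone.utc

    print(naive.tzinfo)       # None
    print(aware.tzinfo)       # UTC
    print(aware.isoformat())  # e.g. 2025-01-01T12:00:00+00:00 (offset included)

    # Mixing the two is an error: `naive < aware` raises TypeError,
    # which is why the switch has to be applied consistently.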
4 changes: 2 additions & 2 deletions folding/utils/s3_utils.py
@@ -76,7 +76,7 @@ def get(self, key: str, output_path: str) -> None:
         pass

     @abstractmethod
-    def generate_presigned_url(self, key: str, expires_in: int = 3600) -> str:
+    def generate_presigned_url(self, key: str, expires_in: int = 7200) -> str:
         """Generates a presigned URL for temporary access to an object."""
         pass

@@ -218,7 +218,7 @@ def generate_presigned_url(
         miner_hotkey: str,
         pdb_id: str,
         file_name: str,
-        expires_in: int = 3600,
+        expires_in: int = 7200,
         method: str = "get_object",
     ) -> dict[str, Any]:
         """Generates a presigned URL for temporary access to an object.
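Both the abstract method and the concrete S3 implementation now default to a two-hour expiry. For context, a minimal boto3 sketch of how a presigned GET URL is typically produced — the client setup and bucket name here are assumptions, not code from this repo:

    import boto3

    s3_client = boto3.client("s3")  # assumed client configuration

    def generate_presigned_url(key: str, expires_in: int = 7200) -> str:
        """Return a URL granting temporary GET access to `key`."""
        return s3_client.generate_presigned_url(
            "get_object",
            Params={"Bucket": "example-bucket", "Key": key},  # placeholder bucket
            ExpiresIn=expires_in,  # seconds; 7200 = 2 hours
        )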
2 changes: 1 addition & 1 deletion folding/validators/forward.py
@@ -159,7 +159,7 @@ async def try_prepare_md_challenge(self, config, pdb_id: str) -> Dict:
         )

         try:
-            async with timeout(300):
+            async with timeout(600):
                 await protein.setup_simulation()

                 if protein.init_energy > 0:
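The simulation setup budget doubles from 300 s to 600 s. The diff does not show where `timeout` is imported from; a sketch of the pattern assuming the standard-library `asyncio.timeout` (Python 3.11+; the `async_timeout` package offers the same interface on older versions):

    from asyncio import timeout  # assumption: could equally be async_timeout.timeout

    async def setup_with_budget(protein) -> None:
        try:
            async with timeout(600):  # raised from 300 in this PR
                await protein.setup_simulation()
        except TimeoutError:
            # Setup exceeded the 10-minute budget; the challenge is abandoned.
            raise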
16 changes: 8 additions & 8 deletions neurons/validator.py
@@ -7,7 +7,7 @@
import asyncio
import traceback

-from datetime import datetime
+from datetime import datetime, timezone
from typing import Any, Dict, List

import netaddr
@@ -104,10 +104,10 @@ def __init__(self, config=None):
         self.last_time_checked = (
             self.last_time_checked
             if hasattr(self, "last_time_checked")
-            else datetime.now()
+            else datetime.now(timezone.utc)
         )

-        self.last_time_created_jobs = datetime.now()
+        self.last_time_created_jobs = datetime.now(timezone.utc)

         if not self.config.s3.off:
             try:
@@ -277,7 +277,7 @@ async def add_job(self, job_event: dict[str, Any], protein: Protein = None) -> b
job_event["s3_links"] = {
"testing": "testing"
} # overwritten below if s3 logging is on.
async with timeout(300):
async with timeout(600):
logger.info(
f"setup_simulation for organic query: {job_event['pdb_id']}"
)
@@ -308,7 +308,7 @@ async def add_job(self, job_event: dict[str, Any], protein: Protein = None) -> b
                 str(spec_version),
                 job_event["pdb_id"],
                 self.validator_hotkey_reference,
-                datetime.now().strftime("%Y-%m-%d_%H-%M-%S"),
+                datetime.now(timezone.utc).strftime("%Y-%m-%d_%H-%M-%S"),
             )
             s3_links = {}
             for file_type, file_path in files_to_upload.items():
@@ -350,7 +350,7 @@ async def add_job(self, job_event: dict[str, Any], protein: Protein = None) -> b

         logger.success("Job was uploaded successfully!")

-        self.last_time_created_jobs = datetime.now()
+        self.last_time_created_jobs = datetime.now(timezone.utc)

         # TODO: return job_id
         return True
@@ -501,7 +501,7 @@ async def prepare_event_for_logging(event: Dict):
             output_links.append(defaultdict(str))

         best_cpt_files = []
-        output_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
+        output_time = datetime.now(timezone.utc).strftime("%Y-%m-%d_%H-%M-%S")

         for idx, (uid, files) in enumerate(
             zip(job.event["processed_uids"], job.event["files"])
@@ -664,7 +664,7 @@ async def read_and_update_rewards(self):
             last_time_checked=self.last_time_checked.strftime("%Y-%m-%dT%H:%M:%S")
         )

-        self.last_time_checked = datetime.now()
+        self.last_time_checked = datetime.now(timezone.utc)

         if inactive_jobs_queue.qsize() == 0:
             logger.info("No inactive jobs to update.")
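One side effect worth noting (an observation, not code from the diff): none of the format strings above include %z, so the rendered timestamps keep their shape after this change — only the instant they encode moves from local time to UTC on machines not already running in UTC:

    from datetime import datetime, timezone

    ts = datetime.now(timezone.utc)

    # Same format strings as the validator; no %z, so no offset is rendered.
    print(ts.strftime("%Y-%m-%d_%H-%M-%S"))  # e.g. 2025-01-01_12-00-00
    print(ts.strftime("%Y-%m-%dT%H:%M:%S"))  # e.g. 2025-01-01T12:00:00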
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "folding"
version = "3.0.3"
version = "3.0.4"
description = "Macrocosmos Subnet 25: Mainframe"
authors = ["Brian McCrindle <[email protected]>", "Sergio Champoux <[email protected]>", "Szymon Fonau <[email protected]>"]

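The version is bumped in two places, folding/__init__.py and pyproject.toml. A hypothetical consistency check (not part of the PR) that would catch the two drifting apart:

    import tomllib  # Python 3.11+

    import folding

    with open("pyproject.toml", "rb") as f:
        pyproject_version = tomllib.load(f)["tool"]["poetry"]["version"]

    assert folding.__version__ == pyproject_version == "3.0.4"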