24 changes: 24 additions & 0 deletions examples/sn13_on_demand_data.py
@@ -0,0 +1,24 @@
"""
Example of using the SN13 On Demand Data Streaming service with the Macrocosmos SDK.
"""

import os

import macrocosmos as mc

api_key = os.environ.get("SN13_API_KEY", os.environ.get("MACROCOSMOS_API_KEY"))

client = mc.Sn13Client(
    api_key=api_key, app_name="examples/sn13_on_demand_data.py", secure=False
)

response = client.sn13.OnDemandData(
    source="x",
    usernames=["nasa", "spacex"],
    keywords=["photo", "space", "mars"],
    start_date="2024-04-01",
    end_date="2025-04-25",
    limit=3,
)

print(response)
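The response carries the fields declared by `OnDemandDataResponse` further down in this diff: a `status` string, a repeated `data` payload, and a `meta` struct. A minimal sketch of inspecting it, assuming the synchronous resource returns an object that exposes those three fields as attributes (the resource implementation itself is not shown in this section):

```python
# Sketch only: assumes the returned object mirrors OnDemandDataResponse
# (status: str, data: list of dict-like records, meta: dict-like).
if response.status == "success":
    for record in response.data:
        # Each record is an arbitrary struct; its keys depend on the source.
        print(record)
    print("meta:", response.meta)
else:
    print("Request failed with status:", response.status)
```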
31 changes: 30 additions & 1 deletion protos/sn13/v1/sn13_validator.proto
@@ -1,7 +1,7 @@
syntax = "proto3";

package sn13.v1;

import "google/protobuf/struct.proto";

option go_package = "macrocosm-os/rift/constellation_api/gen/sn13/v1";

@@ -10,6 +10,9 @@ service Sn13Service {
  // ListTopics is the RPC method for getting the top topics
  rpc ListTopics(ListTopicsRequest) returns (ListTopicsResponse);
  rpc ValidateRedditTopic(ValidateRedditTopicRequest) returns (ValidateRedditTopicResponse);

  // Access the SN13 API endpoint on_demand_data_request via Constellation
  rpc OnDemandData(OnDemandDataRequest) returns (OnDemandDataResponse);
}

// ListTopicsRequest is the request message for getting the top topics
@@ -53,3 +56,29 @@ message ValidateRedditTopicResponse {
  // quarantine: whether the topic is quarantined
  bool quarantine = 5;
}

// OnDemandDataRequest is a request to SN13 to retrieve data
message OnDemandDataRequest {
  // source: the data source (X or Reddit)
  string source = 1;
  // usernames: list of usernames to fetch data from
  repeated string usernames = 2;
  // keywords: list of keywords to search for
  repeated string keywords = 3;
  // start_date: ISO 8601 formatted date string (e.g. "2024-01-01T00:00:00Z")
  optional string start_date = 4;
  // end_date: ISO 8601 formatted date string (e.g. "2024-01-31T23:59:59Z")
  optional string end_date = 5;
  // limit: maximum number of results to return
  optional int64 limit = 6;
}

// OnDemandDataResponse is the response from SN13 for an on-demand data request
message OnDemandDataResponse {
  // status: the request status, either success/error
  string status = 1;
  // data: the data object returned
  repeated google.protobuf.Struct data = 2;
  // meta: additional metadata about the request
  google.protobuf.Struct meta = 3;
}
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "macrocosmos"
version = "1.0.1"
version = "1.0.2"
description = "The official Python SDK for Macrocosmos"
readme = "README.md"
license = "Apache-2.0"
4 changes: 4 additions & 0 deletions src/macrocosmos/__init__.py
@@ -25,6 +25,8 @@
    GravityClient,
    BillingClient,
    AsyncBillingClient,
    Sn13Client,
    AsyncSn13Client,
)
from .types import (
    ChatCompletionChunkResponse,
@@ -47,4 +49,6 @@
    "ChatCompletionChunkResponse",
    "SamplingParameters",
    "WebRetrievalResponse",
    "Sn13Client",
    "AsyncSn13Client",
]
88 changes: 88 additions & 0 deletions src/macrocosmos/client.py
@@ -8,6 +8,7 @@
    SyncCompletions,
)
from macrocosmos.resources.gravity import AsyncGravity, SyncGravity
from macrocosmos.resources.sn13 import AsyncSn13, SyncSn13
from macrocosmos.resources.web_search import AsyncWebSearch, SyncWebSearch
from macrocosmos.resources.billing import AsyncBilling, SyncBilling
from macrocosmos.resources._client import BaseClient
@@ -265,3 +266,90 @@ def __init__(
        )

        self.billing = SyncBilling(self)


class AsyncSn13Client(BaseClient):
    """
    Asynchronous client for the SN13 API service.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        base_url: Optional[str] = None,
        timeout: Optional[int] = None,
        max_retries: int = 0,
        compress: bool = True,
        secure: Optional[bool] = None,
        app_name: Optional[str] = None,
    ):
        """
        Initialize the asynchronous SN13 API client.

        Args:
            api_key: The API key.
            base_url: The base URL for the API.
            timeout: Time to wait for a response in seconds. (default: None)
            max_retries: The maximum number of retries. (default: 0)
            compress: Whether to compress the request using gzip (default: True).
            secure: Whether to use HTTPS (default: True).
            app_name: The name of the application using the client.
        """
        if not api_key:
            api_key = os.environ.get("SN13_API_KEY")

        super().__init__(
            api_key=api_key,
            base_url=base_url,
            timeout=timeout,
            max_retries=max_retries,
            secure=secure,
            compress=compress,
            app_name=app_name,
        )

        self.sn13 = AsyncSn13(self)


class Sn13Client(BaseClient):
    """
    Synchronous client for the SN13 API service.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        base_url: Optional[str] = None,
        timeout: Optional[int] = None,
        max_retries: int = 0,
        secure: Optional[bool] = None,
        compress: bool = True,
        app_name: Optional[str] = None,
    ):
        """
        Initialize the synchronous SN13 API client.

        Args:
            api_key: The API key.
            base_url: The base URL for the API.
            timeout: Time to wait for a response in seconds. (default: None)
            max_retries: The maximum number of retries. (default: 0)
            secure: Whether to use HTTPS (default: True).
            compress: Whether to compress the request using gzip (default: True).
            app_name: The name of the application using the client.
        """
        if not api_key:
            api_key = os.environ.get("SN13_API_KEY")

        super().__init__(
            api_key=api_key,
            base_url=base_url,
            timeout=timeout,
            max_retries=max_retries,
            secure=secure,
            compress=compress,
            app_name=app_name,
        )

        self.sn13 = SyncSn13(self)
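`AsyncSn13Client` wires up the same configuration but exposes the `AsyncSn13` resource. A minimal usage sketch, assuming the async resource offers the same `OnDemandData` signature as the synchronous example above and returns an awaitable (the resource module `macrocosmos/resources/sn13.py` is not shown here); the app name below is a hypothetical example:

```python
import asyncio
import os

import macrocosmos as mc


async def main():
    # Assumes SN13_API_KEY (or MACROCOSMOS_API_KEY) is set in the environment.
    api_key = os.environ.get("SN13_API_KEY", os.environ.get("MACROCOSMOS_API_KEY"))
    client = mc.AsyncSn13Client(
        api_key=api_key,
        app_name="examples/sn13_on_demand_async.py",  # hypothetical example name
    )
    # Assumption: the async resource mirrors the synchronous OnDemandData signature.
    response = await client.sn13.OnDemandData(
        source="x",
        usernames=["nasa"],
        keywords=["mars"],
        start_date="2024-04-01",
        end_date="2025-04-25",
        limit=3,
    )
    print(response)


if __name__ == "__main__":
    asyncio.run(main())
```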
30 changes: 30 additions & 0 deletions src/macrocosmos/generated/sn13/v1/sn13_validator_p2p.py
@@ -59,3 +59,33 @@ class ValidateRedditTopicResponse(BaseModel):
    over18: bool = Field(default=False)
    # quarantine: whether the topic is quarantined
    quarantine: bool = Field(default=False)

class OnDemandDataRequest(BaseModel):
    """
    OnDemandDataRequest is a request to SN13 to retrieve data
    """

    # source: the data source (X or Reddit)
    source: str = Field(default="")
    # usernames: list of usernames to fetch data from
    usernames: typing.List[str] = Field(default_factory=list)
    # keywords: list of keywords to search for
    keywords: typing.List[str] = Field(default_factory=list)
    # start_date: ISO 8601 formatted date string (e.g. "2024-01-01T00:00:00Z")
    start_date: typing.Optional[str] = Field(default="")
    # end_date: ISO 8601 formatted date string (e.g. "2024-01-31T23:59:59Z")
    end_date: typing.Optional[str] = Field(default="")
    # limit: maximum number of results to return
    limit: typing.Optional[int] = Field(default=0)

class OnDemandDataResponse(BaseModel):
    """
    OnDemandDataResponse is the response from SN13 for an on-demand data request
    """

    # status: the request status, either success/error
    status: str = Field(default="")
    # data: the data object returned
    data: typing.List[typing.Dict[str, typing.Any]] = Field(default_factory=list)
    # meta: additional metadata about the request
    meta: typing.Dict[str, typing.Any] = Field(default_factory=dict)
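These pydantic mirrors of the proto messages can be constructed and validated without touching the wire format, which is handy for building or checking request payloads. A small sketch, assuming pydantic v2 (`model_dump`; under pydantic v1 the call would be `.dict()`) and the generated module path implied by the src layout:

```python
from macrocosmos.generated.sn13.v1.sn13_validator_p2p import OnDemandDataRequest

request = OnDemandDataRequest(
    source="x",
    usernames=["nasa", "spacex"],
    keywords=["photo", "space", "mars"],
    start_date="2024-04-01",
    end_date="2025-04-25",
    limit=3,
)

# Serialize to a plain dict, e.g. for logging or a JSON request body.
print(request.model_dump())
```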
31 changes: 18 additions & 13 deletions src/macrocosmos/generated/sn13/v1/sn13_validator_pb2.py

Some generated files are not rendered by default.

27 changes: 27 additions & 0 deletions src/macrocosmos/generated/sn13/v1/sn13_validator_pb2.pyi
@@ -1,3 +1,4 @@
from google.protobuf import struct_pb2 as _struct_pb2
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
@@ -46,3 +47,29 @@ class ValidateRedditTopicResponse(_message.Message):
    over18: bool
    quarantine: bool
    def __init__(self, platform: _Optional[str] = ..., topic: _Optional[str] = ..., exists: bool = ..., over18: bool = ..., quarantine: bool = ...) -> None: ...

class OnDemandDataRequest(_message.Message):
    __slots__ = ("source", "usernames", "keywords", "start_date", "end_date", "limit")
    SOURCE_FIELD_NUMBER: _ClassVar[int]
    USERNAMES_FIELD_NUMBER: _ClassVar[int]
    KEYWORDS_FIELD_NUMBER: _ClassVar[int]
    START_DATE_FIELD_NUMBER: _ClassVar[int]
    END_DATE_FIELD_NUMBER: _ClassVar[int]
    LIMIT_FIELD_NUMBER: _ClassVar[int]
    source: str
    usernames: _containers.RepeatedScalarFieldContainer[str]
    keywords: _containers.RepeatedScalarFieldContainer[str]
    start_date: str
    end_date: str
    limit: int
    def __init__(self, source: _Optional[str] = ..., usernames: _Optional[_Iterable[str]] = ..., keywords: _Optional[_Iterable[str]] = ..., start_date: _Optional[str] = ..., end_date: _Optional[str] = ..., limit: _Optional[int] = ...) -> None: ...

class OnDemandDataResponse(_message.Message):
    __slots__ = ("status", "data", "meta")
    STATUS_FIELD_NUMBER: _ClassVar[int]
    DATA_FIELD_NUMBER: _ClassVar[int]
    META_FIELD_NUMBER: _ClassVar[int]
    status: str
    data: _containers.RepeatedCompositeFieldContainer[_struct_pb2.Struct]
    meta: _struct_pb2.Struct
    def __init__(self, status: _Optional[str] = ..., data: _Optional[_Iterable[_Union[_struct_pb2.Struct, _Mapping]]] = ..., meta: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ...
44 changes: 44 additions & 0 deletions src/macrocosmos/generated/sn13/v1/sn13_validator_pb2_grpc.py
@@ -43,6 +43,11 @@ def __init__(self, channel):
                request_serializer=sn13_dot_v1_dot_sn13__validator__pb2.ValidateRedditTopicRequest.SerializeToString,
                response_deserializer=sn13_dot_v1_dot_sn13__validator__pb2.ValidateRedditTopicResponse.FromString,
                _registered_method=True)
        self.OnDemandData = channel.unary_unary(
                '/sn13.v1.Sn13Service/OnDemandData',
                request_serializer=sn13_dot_v1_dot_sn13__validator__pb2.OnDemandDataRequest.SerializeToString,
                response_deserializer=sn13_dot_v1_dot_sn13__validator__pb2.OnDemandDataResponse.FromString,
                _registered_method=True)


class Sn13ServiceServicer(object):
@@ -61,6 +66,13 @@ def ValidateRedditTopic(self, request, context):
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def OnDemandData(self, request, context):
        """Access the SN13 API endpoint on_demand_data_request via Constellation
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_Sn13ServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
@@ -74,6 +86,11 @@ def add_Sn13ServiceServicer_to_server(servicer, server):
                    request_deserializer=sn13_dot_v1_dot_sn13__validator__pb2.ValidateRedditTopicRequest.FromString,
                    response_serializer=sn13_dot_v1_dot_sn13__validator__pb2.ValidateRedditTopicResponse.SerializeToString,
            ),
            'OnDemandData': grpc.unary_unary_rpc_method_handler(
                    servicer.OnDemandData,
                    request_deserializer=sn13_dot_v1_dot_sn13__validator__pb2.OnDemandDataRequest.FromString,
                    response_serializer=sn13_dot_v1_dot_sn13__validator__pb2.OnDemandDataResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'sn13.v1.Sn13Service', rpc_method_handlers)
@@ -138,3 +155,30 @@ def ValidateRedditTopic(request,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def OnDemandData(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/sn13.v1.Sn13Service/OnDemandData',
            sn13_dot_v1_dot_sn13__validator__pb2.OnDemandDataRequest.SerializeToString,
            sn13_dot_v1_dot_sn13__validator__pb2.OnDemandDataResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
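For completeness, the generated stub can also be driven directly over a gRPC channel, bypassing the SDK clients. The sketch below assumes the conventional generated stub name `Sn13ServiceStub`, a placeholder endpoint, and a hypothetical authorization header; in practice `Sn13Client`/`AsyncSn13Client` handle the channel, credentials, and compression:

```python
import grpc

from macrocosmos.generated.sn13.v1 import sn13_validator_pb2, sn13_validator_pb2_grpc

# Placeholder target; the real Constellation endpoint is configured by the SDK.
channel = grpc.secure_channel("constellation.example:443", grpc.ssl_channel_credentials())
stub = sn13_validator_pb2_grpc.Sn13ServiceStub(channel)

request = sn13_validator_pb2.OnDemandDataRequest(
    source="x",
    usernames=["nasa"],
    keywords=["mars"],
    start_date="2024-04-01",
    end_date="2025-04-25",
    limit=3,
)

# The metadata key is an assumption; the SDK's real auth header is not shown here.
response = stub.OnDemandData(request, metadata=(("authorization", "Bearer <API_KEY>"),))
print(response.status, len(response.data))
```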