WIP: nip-50 support (postgres) #183

Draft · wants to merge 1 commit into base: master
4 changes: 4 additions & 0 deletions src/info.rs
@@ -68,6 +68,10 @@ impl From<Settings> for RelayInfo {
fn from(c: Settings) -> Self {
let mut supported_nips = vec![1, 2, 9, 11, 12, 15, 16, 20, 22, 33, 40];

if c.database.engine == "postgres" {
supported_nips.push(50);
}

if c.authorization.nip42_auth {
supported_nips.push(42);
supported_nips.sort();
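NIP-50 is only pushed onto `supported_nips` when the configured database engine is `postgres`, since this PR adds no search support to the SQLite backend. A minimal test sketch of that behavior, assuming `Settings` implements `Default` and exposes `database.engine` and `supported_nips` publicly (assumptions of mine, not shown in this diff):

```rust
// Hedged sketch, not part of this PR. Assumes Settings::default() exists and
// that database.engine and RelayInfo.supported_nips are publicly accessible.
#[test]
fn postgres_engine_advertises_nip50() {
    let mut settings = Settings::default();
    settings.database.engine = "postgres".to_owned();
    let info: RelayInfo = settings.into();
    // NIP-50 should only be advertised for the postgres engine.
    assert!(info.supported_nips.contains(&50));
}
```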
58 changes: 34 additions & 24 deletions src/repo/postgres.rs
@@ -59,7 +59,7 @@ async fn cleanup_expired(conn: PostgresPool, frequency: Duration) -> Result<()>
}
}
}
- };
+ }
}
});
Ok(())
@@ -149,19 +149,19 @@ impl NostrRepo for PostgresRepo {
VALUES($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT (id) DO NOTHING"#,
)
- .bind(&id_blob)
- .bind(&pubkey_blob)
- .bind(Utc.timestamp_opt(e.created_at as i64, 0).unwrap())
- .bind(
-     e.expiration()
-         .and_then(|x| Utc.timestamp_opt(x as i64, 0).latest()),
- )
- .bind(e.kind as i64)
- .bind(event_str.into_bytes())
- .bind(delegator_blob)
- .execute(&mut tx)
- .await?
- .rows_affected();
+ .bind(&id_blob)
+ .bind(&pubkey_blob)
+ .bind(Utc.timestamp_opt(e.created_at as i64, 0).unwrap())
+ .bind(
+     e.expiration()
+         .and_then(|x| Utc.timestamp_opt(x as i64, 0).latest()),
+ )
+ .bind(e.kind as i64)
+ .bind(event_str)
+ .bind(delegator_blob)
+ .execute(&mut tx)
+ .await?
+ .rows_affected();

if ins_count == 0 {
// if the event was a duplicate, no need to insert event or
@@ -281,10 +281,10 @@ ON CONFLICT (id) DO NOTHING"#,
LEFT JOIN tag t ON e.id = t.event_id \
WHERE e.pub_key = $1 AND t.\"name\" = 'e' AND e.kind = 5 AND t.value = $2 LIMIT 1",
)
- .bind(&pubkey_blob)
- .bind(&id_blob)
- .fetch_optional(&mut tx)
- .await?;
+ .bind(&pubkey_blob)
+ .bind(&id_blob)
+ .fetch_optional(&mut tx)
+ .await?;

// check if the query returned a result, meaning we should
// hide the current event
@@ -393,7 +393,7 @@ ON CONFLICT (id) DO NOTHING"#,
}

row_count += 1;
- let event_json: Vec<u8> = row.unwrap().get(0);
+ let event_json: String = row.unwrap().get(0);
loop {
if query_tx.capacity() != 0 {
// we have capacity to add another item
@@ -421,7 +421,7 @@ ON CONFLICT (id) DO NOTHING"#,
query_tx
.send(QueryResult {
sub_id: sub.get_id(),
- event: String::from_utf8(event_json).unwrap(),
+ event: event_json,
})
.await
.ok();
@@ -575,10 +575,10 @@ ON CONFLICT (id) DO NOTHING"#,
sqlx::query(
"UPDATE account SET is_admitted = TRUE, balance = balance - $1 WHERE pubkey = $2",
)
- .bind(admission_cost as i64)
- .bind(pub_key)
- .execute(&self.conn_write)
- .await?;
+ .bind(admission_cost as i64)
+ .bind(pub_key)
+ .execute(&self.conn_write)
+ .await?;
Ok(())
}

@@ -863,6 +863,16 @@ fn query_from_filter(f: &ReqFilter) -> Option<QueryBuilder<Postgres>> {
.push_bind(Utc.timestamp_opt(f.until.unwrap() as i64, 0).unwrap());
}

if let Some(search) = &f.search {
if push_and {
query.push(" AND ");
}
push_and = true;
query.push("e.ts_content @@ websearch_to_tsquery(")
.push_bind(search.clone())
.push(")");
}

// never display hidden events
if push_and {
query.push(" AND e.hidden != 1::bit(1)");
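For context, `websearch_to_tsquery` accepts free-form query text (quoted phrases, `or`, `-` exclusions) rather than raw tsquery syntax, so malformed user input does not error out the query. A rough sketch of the query shape this branch contributes for a filter like `{"kinds": [1], "search": "coffee chess"}`, written as a standalone sqlx call; the surrounding SELECT, ordering, and function name are simplified assumptions, not copied from the repo:

```rust
// Hedged sketch, not from the PR: table/column names match migration m006
// below; everything else here is illustrative.
use sqlx::PgPool;

async fn search_events(pool: &PgPool, search: &str, limit: i64) -> sqlx::Result<Vec<String>> {
    sqlx::query_scalar(
        r#"SELECT e."content"
           FROM "event" e
           WHERE e.ts_content @@ websearch_to_tsquery($1)
             AND e.hidden != 1::bit(1)
           ORDER BY e.created_at DESC
           LIMIT $2"#,
    )
    .bind(search)
    .bind(limit)
    .fetch_all(pool)
    .await
}
```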
28 changes: 27 additions & 1 deletion src/repo/postgres_migration.rs
@@ -37,6 +37,7 @@ pub async fn run_migrations(db: &PostgresPool) -> crate::error::Result<usize> {
run_migration(m003::migration(), db).await;
run_migration(m004::migration(), db).await;
run_migration(m005::migration(), db).await;
run_migration(m006::migration(), db).await;
Ok(current_version(db).await as usize)
}

@@ -286,7 +287,7 @@ mod m005 {

pub fn migration() -> impl Migration {
SimpleSqlMigration {
- serial_number: VERSION,
+ serial_number: crate::repo::postgres_migration::m005::VERSION,
sql: vec![
r#"
-- Create account table
@@ -318,3 +319,28 @@ CREATE TABLE "invoice" (
}
}
}

mod m006 {
use crate::repo::postgres_migration::{Migration, SimpleSqlMigration};

pub const VERSION: i64 = 6;

pub fn migration() -> impl Migration {
SimpleSqlMigration {
serial_number: VERSION,
sql: vec![
r#"
--- Use text column for content
ALTER TABLE "event" ALTER COLUMN "content" TYPE text USING convert_from("content", 'utf-8);

--- Create search col for event content
ALTER TABLE event
ADD COLUMN ts_content tsvector
GENERATED ALWAYS AS (to_tsvector('english', "content"::json->>'content')) stored;

CREATE INDEX ts_content_idx ON event USING GIN (ts_content);
"#,
],
}
}
}
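`ts_content` is a stored generated column over the `content` field of the event JSON, so it is backfilled for existing rows when the migration runs and kept in sync on every insert with no application changes. A small sketch of sanity-checking it after migration m006; the function name and the explicit `'english'` argument are my own, not from this PR:

```rust
// Hedged sketch, not part of the PR: count events matching a search term
// against the generated ts_content column created by migration m006.
use sqlx::PgPool;

async fn count_search_matches(pool: &PgPool, term: &str) -> sqlx::Result<i64> {
    // 'english' matches the configuration used by the generated column;
    // the PR's runtime query omits it and relies on the database default.
    sqlx::query_scalar(
        r#"SELECT count(*) FROM "event"
           WHERE ts_content @@ websearch_to_tsquery('english', $1)"#,
    )
    .bind(term)
    .fetch_one(pool)
    .await
}
```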
14 changes: 11 additions & 3 deletions src/subscription.rs
@@ -37,10 +37,12 @@ pub struct ReqFilter {
/// Set of tags
pub tags: Option<HashMap<char, HashSet<String>>>,
/// Force no matches due to malformed data
- // we can't represent it in the req filter, so we don't want to
- // erroneously match. This basically indicates the req tried to
- // do something invalid.
+ /// we can't represent it in the req filter, so we don't want to
+ /// erroneously match. This basically indicates the req tried to
+ /// do something invalid.
pub force_no_match: bool,
/// NIP-50 search query
pub search: Option<String>,
}

impl Serialize for ReqFilter {
@@ -67,6 +69,9 @@ impl Serialize for ReqFilter {
if let Some(authors) = &self.authors {
map.serialize_entry("authors", &authors)?;
}
if let Some(search) = &self.search {
map.serialize_entry("search", &search)?;
}
// serialize tags
if let Some(tags) = &self.tags {
for (k, v) in tags {
Expand Down Expand Up @@ -98,6 +103,7 @@ impl<'de> Deserialize<'de> for ReqFilter {
authors: None,
limit: None,
tags: None,
search: None,
force_no_match: false,
};
let empty_string = "".into();
@@ -124,6 +130,8 @@ impl<'de> Deserialize<'de> for ReqFilter {
rf.until = Deserialize::deserialize(val).ok();
} else if key == "limit" {
rf.limit = Deserialize::deserialize(val).ok();
} else if key == "search" {
rf.search = Deserialize::deserialize(val).ok();
} else if key == "authors" {
let raw_authors: Option<Vec<String>> = Deserialize::deserialize(val).ok();
if let Some(a) = raw_authors.as_ref() {
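With the new field in place, a `REQ` filter such as `["REQ", "sub1", {"kinds": [1], "search": "purple ostrich"}]` carries the query text through to the repo layer. A minimal round-trip sketch; the test name is mine, and it assumes `serde_json`, which the crate already depends on:

```rust
// Hedged sketch, not part of the PR: confirm the "search" key survives a
// deserialize/serialize round trip on ReqFilter.
#[test]
fn search_field_round_trips() {
    let raw = r#"{"kinds":[1],"search":"purple ostrich"}"#;
    let filter: ReqFilter = serde_json::from_str(raw).expect("valid filter");
    assert_eq!(filter.search.as_deref(), Some("purple ostrich"));
    // The custom Serialize impl writes the field back out under "search".
    let out = serde_json::to_string(&filter).expect("serializable");
    assert!(out.contains(r#""search":"purple ostrich""#));
}
```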