Match with bsky pds v2 refactor

Rudy Fraser
2024-02-24 17:18:55 -05:00
parent a2ebfee7e3
commit ba1ca14307
9 changed files with 359 additions and 641 deletions

.idea/dataSources.xml (generated, new file, +12 lines)

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="rsky_local@localhost" uuid="0ce56055-df2a-4228-8e4b-3f12828811e4">
<driver-ref>postgresql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.postgresql.Driver</jdbc-driver>
<jdbc-url>jdbc:postgresql://localhost:5432/rsky_local</jdbc-url>
<working-dir>$ProjectFileDir$</working-dir>
</data-source>
</component>
</project>

.idea/sqldialects.xml (generated, new file, +7 lines)

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$/rsky-pds/migrations/2023-11-15-004814_pds_init/up.sql" dialect="PostgreSQL" />
<file url="PROJECT" dialect="PostgreSQL" />
</component>
</project>

View File

@ -14,7 +14,7 @@ documentation = "https://docs.rs/rsky-pds"
rocket = { version = "=0.5.0-rc.3", features = ["json"] }
dotenvy = "0.15"
rsky-lexicon = { workspace = true, version = "0.1.0" }
diesel = { version = "2.1.0", features = ["postgres"] }
diesel = { version = "2.1.0", features = ["postgres","chrono"] }
chrono = "0.4.26"
serde = { version = "1.0.160", features = ["derive"] }
serde_derive = "^1.0"
@ -34,6 +34,7 @@ secp256k1 = { version = "0.28.2", features = ["global-context", "serde", "rand",
rand_core = "0.6.4"
sha2 = "0.11.0-pre.3"
indexmap = { version = "1.9.3",features = ["serde-1"] }
hex = "0.4.3"
[dependencies.rocket_sync_db_pools]
version = "=0.1.0-rc.3"
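Note: the `chrono` feature newly enabled on `diesel` is what lets timestamp columns map onto `chrono` types in the models below (e.g. `EmailToken.requested_at`). A minimal sketch of that mapping, assuming diesel 2.x with the `postgres` and `chrono` features and a `Timestamptz` column; the struct is illustrative only and not part of this commit:

use chrono::{DateTime, Utc};
use diesel::prelude::*;

// Hypothetical reduced model: with diesel's "chrono" feature enabled,
// a Timestamptz column deserializes directly into DateTime<Utc>.
#[derive(Queryable)]
struct TokenTimestamp {
    requested_at: DateTime<Utc>,
}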

View File

@ -1,24 +1,18 @@
-- This file should undo anything in `up.sql`
DROP TABLE pds.email_token;
DROP TABLE pds.runtime_flag;
DROP TABLE pds.user_pref;
DROP TABLE pds.actor;
DROP TABLE pds.user_account;
DROP TABLE pds.repo_seq;
DROP TABLE pds.repo_root;
DROP TABLE pds.repo_blob;
DROP TABLE pds.refresh_token;
DROP TABLE pds.did_doc;
DROP TABLE pds.account_pref;
DROP TABLE pds.backlink;
DROP TABLE pds.record_blob;
DROP TABLE pds.blob;
DROP TABLE pds.record;
DROP TABLE pds.moderation_report_resolution;
DROP TABLE pds.moderation_report;
DROP TABLE pds.moderation_action_subject_blob;
DROP TABLE pds.moderation_action;
DROP TABLE pds.ipld_block;
DROP TABLE pds.repo_block;
DROP TABLE pds.repo_root;
DROP TABLE pds.email_token;
DROP TABLE pds.account;
DROP TABLE pds.actor;
DROP TABLE pds.refresh_token;
DROP TABLE pds.invite_code_use;
DROP TABLE pds.invite_code;
DROP TABLE pds.did_handle;
DROP TABLE pds.did_cache;
DROP TABLE pds.blob;
DROP TABLE pds.backlink;
DROP TABLE pds.app_password;
DROP TABLE pds.app_migration;
DROP SCHEMA IF EXISTS pds;

View File

@ -1,13 +1,10 @@
-- Create PDS schema
CREATE SCHEMA IF NOT EXISTS pds;
-- Create App Migration Table
CREATE TABLE IF NOT EXISTS pds.app_migration (
id character varying PRIMARY KEY,
success smallint NOT NULL DEFAULT 0,
"completedAt" character varying
);
/* Based heavily on account-manager, did-cache, sequencer, and actor-store migrations
from the canonical TS implementation. */
-- account-manager implementation
-- Create App Password Table
CREATE TABLE IF NOT EXISTS pds.app_password (
did character varying NOT NULL,
@ -21,78 +18,17 @@ ALTER TABLE ONLY pds.app_password
ALTER TABLE ONLY pds.app_password
ADD CONSTRAINT app_password_pkey PRIMARY KEY (did, name);
-- Create Backlink Table
CREATE TABLE IF NOT EXISTS pds.backlink (
uri character varying NOT NULL,
"path" character varying NOT NULL,
"linkToUri" character varying,
"linkToDid" character varying
);
ALTER TABLE ONLY pds.backlink
DROP CONSTRAINT IF EXISTS backlink_pkey;
ALTER TABLE ONLY pds.backlink
ADD CONSTRAINT backlink_pkey PRIMARY KEY (uri, path);
ALTER TABLE ONLY pds.backlink
DROP CONSTRAINT IF EXISTS backlink_link_to_chk;
-- Exactly one of linkToUri or linkToDid should be set
ALTER TABLE ONLY pds.backlink
ADD CONSTRAINT backlink_link_to_chk
CHECK (
("linkToUri" is null and "linkToDid" is not null)
OR ("linkToUri" is not null and "linkToDid" is null)
);
CREATE INDEX backlink_path_to_uri_idx
ON pds.backlink(path, "linkToUri");
CREATE INDEX backlink_path_to_did_idx
ON pds.backlink(path, "linkToDid");
-- Create Blob Table
CREATE TABLE IF NOT EXISTS pds.blob (
creator character varying NOT NULL,
cid character varying NOT NULL,
"mimeType" character varying NOT NULL,
size integer NOT NULL,
"tempKey" character varying,
width integer,
height integer,
"createdAt" character varying NOT NULL
);
ALTER TABLE ONLY pds.blob
DROP CONSTRAINT IF EXISTS blob_pkey;
ALTER TABLE ONLY pds.blob
ADD CONSTRAINT blob_pkey PRIMARY KEY (creator, cid);
CREATE INDEX blob_tempkey_idx
ON pds.blob("tempKey");
-- Create DID Cache Table
CREATE TABLE IF NOT EXISTS pds.did_cache (
did character varying PRIMARY KEY,
doc text NOT NULL,
"updatedAt" bigint NOT NULL
);
-- Create DID Handle Table
CREATE TABLE IF NOT EXISTS pds.did_handle (
did character varying PRIMARY KEY,
handle character varying
);
CREATE UNIQUE INDEX did_handle_handle_lower_idx
ON pds.did_handle (LOWER(handle));
-- Create Invite Code Table
CREATE TABLE IF NOT EXISTS pds.invite_code (
code character varying PRIMARY KEY,
"availableUses" integer NOT NULL,
disabled smallint NOT NULL DEFAULT 0,
"forUser" character varying NOT NULL,
"forAccount" character varying NOT NULL,
"createdBy" character varying NOT NULL,
"createdAt" character varying NOT NULL
);
CREATE INDEX invite_code_for_user_idx
ON pds.invite_code("forUser");
CREATE INDEX invite_code_for_user_idx
ON pds.invite_code("forAccount");
-- Create Invite Code Use Table
CREATE TABLE IF NOT EXISTS pds.invite_code_use (
@ -106,109 +42,6 @@ ALTER TABLE ONLY pds.invite_code_use
ALTER TABLE ONLY pds.invite_code_use
ADD CONSTRAINT invite_code_use_pkey PRIMARY KEY (code, "usedBy");
-- Create IPLD Block Table
CREATE TABLE IF NOT EXISTS pds.ipld_block (
creator character varying NOT NULL,
cid character varying NOT NULL,
size integer NOT NULL,
content bytea NOT NULL,
"repoRev" character varying
);
ALTER TABLE ONLY pds.ipld_block
DROP CONSTRAINT IF EXISTS ipld_block_pkey;
ALTER TABLE ONLY pds.ipld_block
ADD CONSTRAINT ipld_block_pkey PRIMARY KEY (creator, cid);
CREATE INDEX ipld_block_repo_rev_idx
ON pds.ipld_block(creator, "repoRev", cid);
-- Create Moderation Action Table
CREATE TABLE IF NOT EXISTS pds.moderation_action (
id SERIAL PRIMARY KEY,
action character varying NOT NULL,
"subjectType" character varying NOT NULL,
"subjectDid" character varying NOT NULL,
"subjectUri" character varying,
"subjectCid" character varying,
reason text NOT NULL,
"createdAt" character varying NOT NULL,
"createdBy" character varying NOT NULL,
"reversedAt" character varying,
"reversedBy" character varying,
"reversedReason" text,
"createLabelVals" character varying,
"negateLabelVals" character varying,
"durationInHours" integer,
"expiresAt" character varying
);
-- Create Moderation Action Subject Blob Table
CREATE TABLE IF NOT EXISTS pds.moderation_action_subject_blob (
id SERIAL PRIMARY KEY,
"actionId" integer NOT NULL,
cid character varying NOT NULL,
"recordUri" character varying NOT NULL,
CONSTRAINT fk_subject_action
FOREIGN KEY("actionId")
REFERENCES pds.moderation_action(id)
);
ALTER TABLE ONLY pds.moderation_action_subject_blob
DROP CONSTRAINT IF EXISTS moderation_action_subject_blob_pkey;
ALTER TABLE ONLY pds.moderation_action_subject_blob
ADD CONSTRAINT moderation_action_subject_blob_pkey PRIMARY KEY ("actionId", cid, "recordUri");
-- Create Moderation Report Table
CREATE TABLE IF NOT EXISTS pds.moderation_report (
id SERIAL PRIMARY KEY,
"subjectType" character varying NOT NULL,
"subjectDid" character varying NOT NULL,
"subjectUri" character varying,
"subjectCid" character varying,
"reasonType" character varying NOT NULL,
reason text,
"reportedByDid" character varying NOT NULL,
"createdAt" character varying NOT NULL
);
-- Create Moderation Report Resolution Table
CREATE TABLE IF NOT EXISTS pds.moderation_report_resolution (
"reportId" integer NOT NULL,
"actionId" integer NOT NULL,
"createdAt" character varying NOT NULL,
"createdBy" character varying NOT NULL,
CONSTRAINT fk_report_resolution
FOREIGN KEY("reportId")
REFERENCES pds.moderation_report(id),
CONSTRAINT fk_action_resolution
FOREIGN KEY("actionId")
REFERENCES pds.moderation_action(id)
);
ALTER TABLE ONLY pds.moderation_report_resolution
DROP CONSTRAINT IF EXISTS moderation_report_resolution_pkey;
ALTER TABLE ONLY pds.moderation_report_resolution
ADD CONSTRAINT moderation_report_resolution_pkey PRIMARY KEY ("reportId","actionId");
CREATE INDEX moderation_report_resolution_action_id_idx
ON pds.moderation_report_resolution("actionId");
-- Create Record Table
CREATE TABLE IF NOT EXISTS pds.record (
uri character varying PRIMARY KEY,
cid character varying NOT NULL,
did character varying NOT NULL,
collection character varying NOT NULL,
"rkey" character varying NOT NULL,
"indexedAt" character varying NOT NULL,
"takedownRef" character varying,
"repoRev" character varying
);
CREATE INDEX record_did_cid_idx
ON pds.record(did, cid);
CREATE INDEX record_did_collection_idx
ON pds.record(did, collection);
CREATE INDEX record_repo_rev_idx
ON pds.record(did, "repoRev");
-- Create Refresh Token Table
CREATE TABLE IF NOT EXISTS pds.refresh_token (
id character varying PRIMARY KEY,
@ -218,51 +51,7 @@ CREATE TABLE IF NOT EXISTS pds.refresh_token (
"appPasswordName" character varying
);
CREATE INDEX refresh_token_did_idx -- Aids in refresh token cleanup
ON pds.refresh_token(did);
-- Create Repo Blob Table
CREATE TABLE IF NOT EXISTS pds.repo_blob (
cid character varying NOT NULL,
"recordUri" character varying NOT NULL,
did character varying NOT NULL,
"takedownRef" character varying,
"repoRev" character varying
);
ALTER TABLE ONLY pds.repo_blob
DROP CONSTRAINT IF EXISTS repo_blob_pkey;
ALTER TABLE ONLY pds.repo_blob
ADD CONSTRAINT repo_blob_pkey PRIMARY KEY (cid,"recordUri");
CREATE INDEX repo_blob_did_idx
ON pds.repo_blob(did);
CREATE INDEX repo_blob_repo_rev_idx
ON pds.repo_blob(did, "repoRev");
-- Create Repo Root Table
CREATE TABLE IF NOT EXISTS pds.repo_root (
did character varying PRIMARY KEY,
root character varying NOT NULL,
"indexedAt" character varying NOT NULL,
"takedownRef" character varying,
rev character varying
);
-- Create Repo Sequence Table
CREATE TABLE IF NOT EXISTS pds.repo_seq (
id bigserial PRIMARY KEY,
seq bigint UNIQUE,
did character varying NOT NULL,
"eventType" character varying NOT NULL,
event bytea NOT NULL,
invalidated smallint NOT NULL DEFAULT 0,
"sequencedAt" character varying NOT NULL
);
CREATE INDEX repo_seq_did_idx -- for filtering seqs based on did
ON pds.repo_seq(did);
CREATE INDEX repo_seq_event_type_idx -- for filtering seqs based on event type
ON pds.repo_seq("eventType");
CREATE INDEX repo_seq_sequenced_at_index -- for entering into the seq stream at a particular time
ON pds.repo_seq("sequencedAt");
ON pds.refresh_token(did);
-- Create Actor Table
CREATE TABLE IF NOT EXISTS pds.actor (
@ -276,37 +65,20 @@ CREATE UNIQUE INDEX actor_handle_lower_idx
CREATE INDEX actor_cursor_idx
ON pds.actor("createdAt", did);
-- Create User Account Table
CREATE TABLE IF NOT EXISTS pds.user_account (
-- Create Account Table
CREATE TABLE IF NOT EXISTS pds.account (
did character varying PRIMARY KEY,
email character varying NOT NULL,
"recoveryKey" character varying,
"recoveryKey" character varying, -- For storing Bring Your Own Key
"password" character varying NOT NULL,
"createdAt" character varying NOT NULL,
"invitesDisabled" smallint NOT NULL DEFAULT 0,
"inviteNote" character varying,
"emailConfirmedAt" character varying
);
CREATE UNIQUE INDEX user_account_email_lower_idx
ON pds.user_account (LOWER(email));
CREATE INDEX user_account_cursor_idx
ON pds.user_account("createdAt", did);
-- Create User Preference Table
CREATE TABLE IF NOT EXISTS pds.user_pref (
id bigserial PRIMARY KEY,
did character varying NOT NULL,
name character varying NOT NULL,
"valueJson" text NOT NULL
);
CREATE INDEX user_pref_did_idx
ON pds.user_pref(did);
-- Create Runtime Flag Table
CREATE TABLE IF NOT EXISTS pds.runtime_flag (
name character varying PRIMARY KEY,
value character varying NOT NULL
);
CREATE UNIQUE INDEX account_email_lower_idx
ON pds.account (LOWER(email));
CREATE INDEX account_cursor_idx
ON pds.account("createdAt", did);
-- Create Email Token Table
CREATE TABLE IF NOT EXISTS pds.email_token (
@ -319,5 +91,111 @@ ALTER TABLE ONLY pds.email_token
DROP CONSTRAINT IF EXISTS email_token_pkey;
ALTER TABLE ONLY pds.email_token
ADD CONSTRAINT email_token_pkey PRIMARY KEY (purpose, did);
CREATE UNIQUE INDEX email_token_purpose_token_unique
CREATE UNIQUE INDEX email_token_purpose_token_unique
ON pds.email_token (purpose, token);
-- actor-store implementation
-- Create Repo Root Table
CREATE TABLE IF NOT EXISTS pds.repo_root (
did character varying PRIMARY KEY,
cid character varying NOT NULL,
rev character varying,
"indexedAt" character varying NOT NULL
);
-- Create Repo Block Table
CREATE TABLE IF NOT EXISTS pds.repo_block (
cid character varying PRIMARY KEY,
"repoRev" character varying NOT NULL,
size integer NOT NULL,
content bytea NOT NULL
);
CREATE INDEX ipld_block_repo_rev_idx
ON pds.repo_block("repoRev", cid);
-- Create Record Table
CREATE TABLE IF NOT EXISTS pds.record (
uri character varying PRIMARY KEY,
cid character varying NOT NULL,
collection character varying NOT NULL,
"rkey" character varying NOT NULL,
"repoRev" character varying,
"indexedAt" character varying NOT NULL,
"takedownRef" character varying
);
CREATE INDEX record_did_cid_idx
ON pds.record(cid);
CREATE INDEX record_did_collection_idx
ON pds.record(collection);
CREATE INDEX record_repo_rev_idx
ON pds.record("repoRev");
-- Create Blob Table
CREATE TABLE IF NOT EXISTS pds.blob (
cid character varying PRIMARY KEY,
"mimeType" character varying NOT NULL,
size integer NOT NULL,
"tempKey" character varying,
width integer,
height integer,
"createdAt" character varying NOT NULL,
"takedownRef" character varying
);
CREATE INDEX blob_tempkey_idx
ON pds.blob("tempKey");
-- Create Record Blob Table
CREATE TABLE IF NOT EXISTS pds.record_blob (
"blobCid" character varying NOT NULL,
"recordUri" integer NOT NULL
);
ALTER TABLE ONLY pds.record_blob
DROP CONSTRAINT IF EXISTS record_blob_pkey;
ALTER TABLE ONLY pds.record_blob
ADD CONSTRAINT record_blob_pkey PRIMARY KEY ("blobCid","recordUri");
-- Create Backlink Table
CREATE TABLE IF NOT EXISTS pds.backlink (
uri character varying NOT NULL,
path character varying NOT NULL,
"linkTo" character varying NOT NULL
);
ALTER TABLE ONLY pds.backlink
DROP CONSTRAINT IF EXISTS backlink_pkey;
ALTER TABLE ONLY pds.backlink
ADD CONSTRAINT backlink_pkey PRIMARY KEY (uri, path);
CREATE INDEX backlink_link_to_idx
ON pds.backlink(path, "linkTo");
-- Create Account Preferences Table
CREATE TABLE IF NOT EXISTS pds.account_pref (
id SERIAL PRIMARY KEY,
name character varying NOT NULL,
"valueJson" text
);
-- did-cache implementation
-- Create DID Cache Table
CREATE TABLE IF NOT EXISTS pds.did_doc (
did character varying PRIMARY KEY,
doc text NOT NULL,
"updatedAt" bigint NOT NULL
);
-- sequencer implementation
-- Create Repo Sequence Table
CREATE TABLE IF NOT EXISTS pds.repo_seq (
seq bigserial PRIMARY KEY,
did character varying NOT NULL,
"eventType" character varying NOT NULL,
event bytea NOT NULL,
invalidated smallint NOT NULL DEFAULT 0,
"sequencedAt" character varying NOT NULL
);
CREATE INDEX repo_seq_did_idx -- for filtering seqs based on did
ON pds.repo_seq(did);
CREATE INDEX repo_seq_event_type_idx -- for filtering seqs based on event type
ON pds.repo_seq("eventType");
CREATE INDEX repo_seq_sequenced_at_index -- for entering into the seq stream at a particular time
ON pds.repo_seq("sequencedAt");
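The rewritten up.sql above folds the account-manager, actor-store, did-cache, and sequencer schemas into the single `pds` schema. A minimal sketch of how such a Diesel migration is typically applied from Rust, assuming the `diesel_migrations` crate (not shown in this diff's Cargo.toml) and the default `migrations/` directory:

use diesel::{Connection, PgConnection};
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

// Embeds the SQL under migrations/ (including 2023-11-15-004814_pds_init) at compile time.
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

fn run_migrations(database_url: &str) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let mut conn = PgConnection::establish(database_url)?;
    // Runs up.sql for every migration Diesel has not yet recorded as applied.
    conn.run_pending_migrations(MIGRATIONS)?;
    Ok(())
}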

View File

@ -1,4 +1,5 @@
/*{"handle":"rudy-alt-3.blacksky.app","password":"H5r%FH@%hhg6rvYF","email":"him+testcreate@rudyfraser.com","inviteCode":"blacksky-app-fqytt-7473e"}*/
use std::env;
use rocket::serde::json::Json;
use rocket::response::status;
use rocket::http::Status;
@ -7,10 +8,10 @@ use std::time::SystemTime;
use chrono::offset::Utc as UtcOffset;
use chrono::DateTime;
use crate::DbConn;
use rsky_lexicon::com::atproto::server::{CreateAccountInput, CreateAccountOutput, CreateInviteCodeOutput};
use rsky_lexicon::com::atproto::server::{CreateAccountInput, CreateAccountOutput};
use crate::models::{InternalErrorMessageResponse, InternalErrorCode};
use email_address::*;
use secp256k1::{Secp256k1, Keypair};
use secp256k1::{Secp256k1, Keypair, SecretKey};
#[rocket::post("/xrpc/com.atproto.server.createAccount", format = "json", data = "<body>")]
pub async fn create_account(
@ -29,6 +30,9 @@ pub async fn create_account(
if body.password.is_none() {
error_msg = Some("Password is required".to_owned());
};
if body.did.is_some() {
error_msg = Some("Not yet allowing people to bring their own DID".to_owned());
};
if let Some(email) = &body.email {
let e_slice: &str = &email[..]; // take a full slice of the string
if !EmailAddress::is_valid(e_slice) {
@ -72,18 +76,16 @@ pub async fn create_account(
}
// TO DO: Lookup user by email as well
// TO DO: If no DID is provided in input, use recovery key to create a new one
// determine the did & any plc ops we need to send
// if the provided did document is poorly setup, we throw
// const signingKey = await Secp256k1Keypair.create({ exportable: true })
// const { did, plcOp } = input.did
// ? await validateExistingDid(ctx, handle, input.did, signingKey)
// : await createDidAndPlcOp(ctx, handle, input, signingKey)
// TO DO: Create Repo in storage
let did;
let secp = Secp256k1::new();
let signing_key = Keypair::new(&secp, &mut rand::thread_rng());
let private_key = env::var("PDS_REPO_SIGNING_KEY_K256_PRIVATE_KEY_HEX").unwrap();
let secret_key = SecretKey::from_slice(&hex::decode(private_key.as_bytes()).unwrap()).unwrap();
let signing_key = Keypair::from_secret_key(&secp, &secret_key);
match super::create_did_and_plc_op(
&body.handle,
&body,
signing_key) {
Ok(did_resp) => did = did_resp,
Err(error) => {
@ -98,12 +100,6 @@ pub async fn create_account(
));
}
}
/*
if let Some(input_did) = &body.did {
did = super::validate_existing_did(&body.handle, input_did, signing_key);
} else {
did = super::create_did_and_plc_op(&body.handle, body.into_inner(), signing_key).await?;
}*/
let system_time = SystemTime::now();
let dt: DateTime<UtcOffset> = system_time.into();
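The handler now derives the repo signing key from `PDS_REPO_SIGNING_KEY_K256_PRIVATE_KEY_HEX` instead of generating a fresh keypair per request. A minimal sketch of that key loading with errors propagated rather than unwrapped; the helper name is hypothetical and not part of this commit:

use anyhow::{Context, Result};
use secp256k1::{Keypair, Secp256k1, SecretKey};
use std::env;

// Hypothetical helper; create_account above inlines the same steps with unwrap().
fn load_repo_signing_key() -> Result<Keypair> {
    let secp = Secp256k1::new();
    let hex_key = env::var("PDS_REPO_SIGNING_KEY_K256_PRIVATE_KEY_HEX")
        .context("PDS_REPO_SIGNING_KEY_K256_PRIVATE_KEY_HEX is not set")?;
    // The decoded value must be a 32-byte secp256k1 (K-256) secret key.
    let bytes = hex::decode(&hex_key).context("signing key is not valid hex")?;
    let secret_key =
        SecretKey::from_slice(&bytes).context("signing key is not a valid secret key")?;
    Ok(Keypair::from_secret_key(&secp, &secret_key))
}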

View File

@ -6,7 +6,7 @@ use rand::{distributions::Alphanumeric, Rng};
use crate::models::*;
use anyhow::{Result};
use secp256k1::{
generate_keypair, Keypair, Message, SecretKey, PublicKey
Secp256k1, Keypair, Message, SecretKey, PublicKey
};
use sha2::{Sha256, Digest};
use multibase::Base::Base58Btc;
@ -15,6 +15,7 @@ use data_encoding::BASE32;
use reqwest;
use serde_json::{Value};
use indexmap::IndexMap;
use rsky_lexicon::com::atproto::server::CreateAccountInput;
// Important to use `preserve_order` with serde_json so these bytes are ordered
// correctly when encoding.
@ -113,7 +114,7 @@ pub fn sign(
// Encode object to json before dag-cbor because serde_ipld_dagcbor doesn't properly
// sort by keys
let json = serde_json::to_string(&genesis).unwrap();
// Deserialize to IndexMap to preserve key order. serde_ipld_dagcbor does not sort nested
// Deserialize to an IndexMap with key order preserved. serde_ipld_dagcbor does not sort nested
// objects properly by keys
let map_genesis: IndexMap<String, Value> = serde_json::from_str(&json).unwrap();
@ -160,17 +161,34 @@ pub fn encode_did_key(
pub fn create_did_and_plc_op(
handle: &str,
//input: CreateAccountInput,
_signing_key: Keypair
input: &CreateAccountInput,
signing_key: Keypair
) -> Result<String> {
let (secret_key, public_key) = generate_keypair(&mut rand::thread_rng());
let secp = Secp256k1::new();
let private_key: String;
if let Some(recovery_key) = &input.recovery_key {
private_key = recovery_key.clone();
} else {
private_key = env::var("PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX").unwrap();
}
let decoded_key = hex::decode(private_key.as_bytes())
.map_err(|error| {
let context = format!("Issue decoding hex '{}'", private_key);
anyhow::Error::new(error).context(context)
})?;
let secret_key = SecretKey::from_slice(&decoded_key)
.map_err(|error| {
let context = format!("Issue creating secret key from input '{}'", private_key);
anyhow::Error::new(error).context(context)
})?;
let public_key = secret_key.public_key(&secp);
println!("Generating and signing PLC directory genesis operation...");
let mut create_op = PlcGenesisOperation {
r#type: "plc_operation".to_owned(),
rotation_keys: vec![encode_did_key(&public_key)],
verification_methods: PlcGenesisVerificationMethods {
atproto: encode_did_key(&public_key)
atproto: encode_did_key(&signing_key.public_key())
},
also_known_as: vec![format!("at://{handle}")],
services: PlcGenesisServices {
@ -202,21 +220,28 @@ pub fn create_did_and_plc_op(
env::var("PLC_SERVER").unwrap_or("plc.directory".to_owned()),
did_plc);
let client = reqwest::blocking::Client::new();
let response = client
let mut response = client
.post(plc_url)
.json(&create_op)
.header("Connection", "Keep-Alive")
.header("Keep-Alive", "timeout=5, max=1000")
.send()?;
println!("Response from server: {:#?}", response.text()?);
Ok(did_plc.into())
let mut buf: Vec<u8> = vec![];
response.copy_to(&mut buf)?;
let resp_msg = String::from_utf8(buf).unwrap();
match response.error_for_status() {
Ok(_res) => Ok(did_plc.into()),
Err(error) => Err(anyhow::Error::new(error).context(resp_msg))
}
}
/*pub fn validate_existing_did(
/*
pub fn validate_existing_did(
handle: &str,
input_did: &str,
signing_key: Keypair
) -> Result<String> {
todo!()
}*/
pub mod confirm_email;
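create_did_and_plc_op now copies the PLC directory's response body out before calling error_for_status(), since that call consumes the response and would otherwise drop the server's error message. A standalone sketch of the same shape, assuming reqwest's blocking client with the json feature; the function and its arguments are stand-ins, not part of this commit:

use anyhow::{Context, Result};

fn post_plc_operation(plc_url: &str, operation: &serde_json::Value) -> Result<()> {
    let client = reqwest::blocking::Client::new();
    let mut response = client.post(plc_url).json(operation).send()?;

    // Capture the body first; error_for_status() takes ownership of the response.
    let mut buf: Vec<u8> = vec![];
    response.copy_to(&mut buf)?;
    let body = String::from_utf8_lossy(&buf).into_owned();

    response
        .error_for_status()
        .map(|_| ())
        .with_context(|| format!("PLC directory rejected the operation: {body}"))
}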

View File

@ -3,37 +3,61 @@
#![allow(unused)]
#![allow(clippy::all)]
use chrono::DateTime;
use chrono::offset::Utc;
use diesel::prelude::*;
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did))]
#[diesel(table_name = crate::schema::pds::account)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Account {
pub did: String,
pub email: String,
#[diesel(column_name = recoveryKey)]
#[serde(rename = "recoveryKey")]
pub recovery_key: Option<String>,
pub password: String,
#[diesel(column_name = createdAt)]
#[serde(rename = "createdAt")]
pub created_at: String,
#[diesel(column_name = invitesDisabled)]
#[serde(rename = "invitesDisabled")]
pub invites_disabled: i16,
#[diesel(column_name = emailConfirmedAt)]
#[serde(rename = "emailConfirmedAt")]
pub email_confirmed_at: Option<String>,
}
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::pds::account_pref)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct AccountPref {
pub id: i32,
pub name: String,
#[diesel(column_name = valueJson)]
#[serde(rename = "valueJson")]
pub value_json: Option<String>,
}
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did))]
#[diesel(table_name = crate::schema::pds::actor)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Actor {
pub did: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub handle: Option<String>,
#[diesel(column_name = createdAt)]
#[serde(rename = "createdAt")]
pub created_at: String,
#[diesel(column_name = takedownRef)]
#[serde(rename = "takedownRef", skip_serializing_if = "Option::is_none")]
#[serde(rename = "takedownRef")]
pub takedown_ref: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::pds::app_migration)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct AppMigration {
pub id: String,
pub success: i16,
#[serde(rename = "completedAt", skip_serializing_if = "Option::is_none")]
pub completed_at: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did, name))]
#[diesel(table_name = crate::schema::pds::app_password)]
#[diesel(check_for_backend(diesel::pg::Pg))]
@ -41,65 +65,59 @@ pub struct AppPassword {
pub did: String,
pub name: String,
pub password: String,
#[diesel(column_name = createdAt)]
#[serde(rename = "createdAt")]
pub created_at: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(uri, path))]
#[diesel(table_name = crate::schema::pds::backlink)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Backlink {
pub uri: String,
pub path: String,
#[serde(rename = "linkToUri", skip_serializing_if = "Option::is_none")]
pub link_to_uri: Option<String>,
#[serde(rename = "linkToDid", skip_serializing_if = "Option::is_none")]
pub link_to_did: Option<String>,
#[diesel(column_name = linkTo)]
#[serde(rename = "linkTo")]
pub link_to: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(creator, cid))]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(cid))]
#[diesel(table_name = crate::schema::pds::blob)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Blob {
pub creator: String,
pub cid: String,
#[diesel(column_name = mimeType)]
#[serde(rename = "mimeType")]
pub mime_type: String,
pub size: i32,
#[serde(rename = "tempKey", skip_serializing_if = "Option::is_none")]
#[diesel(column_name = tempKey)]
#[serde(rename = "tempKey")]
pub temp_key: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub width: Option<i32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub height: Option<i32>,
#[diesel(column_name = createdAt)]
#[serde(rename = "createdAt")]
pub created_at: String,
#[diesel(column_name = takedownRef)]
#[serde(rename = "takedownRef")]
pub takedown_ref: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did))]
#[diesel(table_name = crate::schema::pds::did_cache)]
#[diesel(table_name = crate::schema::pds::did_doc)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct DidCache {
pub struct DidDoc {
pub did: String,
pub doc: String,
#[diesel(column_name = updatedAt)]
#[serde(rename = "updatedAt")]
pub updated_at: i64,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did))]
#[diesel(table_name = crate::schema::pds::did_handle)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct DidHandle {
pub did: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub handle: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(purpose, did))]
#[diesel(table_name = crate::schema::pds::email_token)]
#[diesel(check_for_backend(diesel::pg::Pg))]
@ -107,252 +125,135 @@ pub struct EmailToken {
pub purpose: String,
pub did: String,
pub token: String,
#[diesel(column_name = requestedAt)]
#[serde(rename = "requestedAt")]
pub requested_at: DateTime<Utc>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(code))]
#[diesel(table_name = crate::schema::pds::invite_code)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct InviteCode {
pub code: String,
#[diesel(column_name = availableUses)]
#[serde(rename = "availableUses")]
pub available_uses: i32,
pub disabled: i16,
#[serde(rename = "forUser")]
pub for_user: String,
#[diesel(column_name = forAccount)]
#[serde(rename = "forAccount")]
pub for_account: String,
#[diesel(column_name = createdBy)]
#[serde(rename = "createdBy")]
pub created_by: String,
#[diesel(column_name = createdAt)]
#[serde(rename = "createdAt")]
pub created_at: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(code, used_by))]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(code, usedBy))]
#[diesel(table_name = crate::schema::pds::invite_code_use)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct InviteCodeUse {
pub code: String,
#[diesel(column_name = usedBy)]
#[serde(rename = "usedBy")]
pub used_by: String,
#[diesel(column_name = usedAt)]
#[serde(rename = "usedAt")]
pub used_at: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(creator, cid))]
#[diesel(table_name = crate::schema::pds::ipld_block)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct IpldBlock {
pub creator: String,
pub cid: String,
pub size: i32,
pub content: Vec<u8>,
#[serde(rename = "repoRev", skip_serializing_if = "Option::is_none")]
pub repo_rev: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::pds::moderation_action)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct ModerationAction {
pub id: i32,
pub action: String,
#[serde(rename = "subjectType")]
pub subject_type: String,
#[serde(rename = "subjectDid")]
pub subject_did: String,
#[serde(rename = "subjectUri", skip_serializing_if = "Option::is_none")]
pub subject_uri: Option<String>,
#[serde(rename = "subjectCid", skip_serializing_if = "Option::is_none")]
pub subject_cid: Option<String>,
pub reason: String,
#[serde(rename = "createdAt")]
pub created_at: String,
#[serde(rename = "createdBy")]
pub created_by: String,
#[serde(rename = "reversedAt", skip_serializing_if = "Option::is_none")]
pub reversed_at: Option<String>,
#[serde(rename = "reversedBy", skip_serializing_if = "Option::is_none")]
pub reversed_by: Option<String>,
#[serde(rename = "reversedReason", skip_serializing_if = "Option::is_none")]
pub reversed_reason: Option<String>,
#[serde(rename = "createLabelVals", skip_serializing_if = "Option::is_none")]
pub create_label_vals: Option<String>,
#[serde(rename = "negateLabelVals", skip_serializing_if = "Option::is_none")]
pub negate_label_vals: Option<String>,
#[serde(rename = "durationInHours", skip_serializing_if = "Option::is_none")]
pub duration_in_hours: Option<i32>,
#[serde(rename = "expiresAt", skip_serializing_if = "Option::is_none")]
pub expires_at: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(action_id, cid, record_uri))]
#[diesel(table_name = crate::schema::pds::moderation_action_subject_blob)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct ModerationActionSubjectBlob {
pub id: i32,
#[serde(rename = "actionId")]
pub action_id: i32,
pub cid: String,
#[serde(rename = "recordUri")]
pub record_uri: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::pds::moderation_report)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct ModerationReport {
pub id: i32,
#[serde(rename = "subjectType")]
pub subject_type: String,
#[serde(rename = "subjectDid")]
pub subject_did: String,
#[serde(rename = "subjectUri", skip_serializing_if = "Option::is_none")]
pub subject_uri: Option<String>,
#[serde(rename = "subjectCid", skip_serializing_if = "Option::is_none")]
pub subject_cid: Option<String>,
#[serde(rename = "reasonType")]
pub reason_type: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub reason: Option<String>,
#[serde(rename = "reportedByDid")]
pub reported_by_did: String,
#[serde(rename = "createdAt")]
pub created_at: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(report_id, action_id))]
#[diesel(table_name = crate::schema::pds::moderation_report_resolution)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct ModerationReportResolution {
#[serde(rename = "reportId")]
pub report_id: i32,
#[serde(rename = "actionId")]
pub action_id: i32,
#[serde(rename = "createdAt")]
pub created_at: String,
#[serde(rename = "createdBy")]
pub created_by: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(uri))]
#[diesel(table_name = crate::schema::pds::record)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Record {
pub uri: String,
pub cid: String,
pub did: String,
pub collection: String,
pub rkey: String,
#[diesel(column_name = repoRev)]
#[serde(rename = "repoRev")]
pub repo_rev: Option<String>,
#[diesel(column_name = indexedAt)]
#[serde(rename = "indexedAt")]
pub indexed_at: String,
#[serde(rename = "takedownRef", skip_serializing_if = "Option::is_none")]
#[diesel(column_name = takedownRef)]
#[serde(rename = "takedownRef")]
pub takedown_ref: Option<String>,
#[serde(rename = "repoRev", skip_serializing_if = "Option::is_none")]
pub repo_rev: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(blobCid, recordUri))]
#[diesel(table_name = crate::schema::pds::record_blob)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct RecordBlob {
#[diesel(column_name = blobCid)]
#[serde(rename = "blobCid")]
pub blob_cid: String,
#[diesel(column_name = recordUri)]
#[serde(rename = "recordUri")]
pub record_uri: i32,
}
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::pds::refresh_token)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct RefreshToken {
pub id: String,
pub did: String,
#[diesel(column_name = expiresAt)]
#[serde(rename = "expiresAt")]
pub expires_at: String,
#[serde(rename = "nextId", skip_serializing_if = "Option::is_none")]
#[diesel(column_name = nextId)]
#[serde(rename = "nextId")]
pub next_id: Option<String>,
#[serde(rename = "appPasswordName", skip_serializing_if = "Option::is_none")]
#[diesel(column_name = appPasswordName)]
#[serde(rename = "appPasswordName")]
pub app_password_name: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(cid, record_uri))]
#[diesel(table_name = crate::schema::pds::repo_blob)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(cid))]
#[diesel(table_name = crate::schema::pds::repo_block)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct RepoBlob {
pub struct RepoBlock {
pub cid: String,
#[serde(rename = "recordUri")]
pub record_uri: String,
pub did: String,
#[serde(rename = "takedownRef", skip_serializing_if = "Option::is_none")]
pub takedown_ref: Option<String>,
#[serde(rename = "repoRev", skip_serializing_if = "Option::is_none")]
pub repo_rev: Option<String>,
#[diesel(column_name = repoRev)]
#[serde(rename = "repoRev")]
pub repo_rev: String,
pub size: i32,
pub content: Vec<u8>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did))]
#[diesel(table_name = crate::schema::pds::repo_root)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct RepoRoot {
pub did: String,
pub root: String,
pub cid: String,
pub rev: Option<String>,
#[diesel(column_name = indexedAt)]
#[serde(rename = "indexedAt")]
pub indexed_at: String,
#[serde(rename = "takedownRef", skip_serializing_if = "Option::is_none")]
pub takedown_ref: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub rev: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[derive(Queryable, Identifiable, Selectable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(seq))]
#[diesel(table_name = crate::schema::pds::repo_seq)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct RepoSeq {
pub id: i64,
#[serde(skip_serializing_if = "Option::is_none")]
pub seq: Option<i64>,
pub seq: i64,
pub did: String,
#[diesel(column_name = eventType)]
#[serde(rename = "eventType")]
pub event_type: String,
pub event: Vec<u8>,
pub invalidated: i16,
#[diesel(column_name = sequencedAt)]
#[serde(rename = "sequencedAt")]
pub sequenced_at: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(name))]
#[diesel(table_name = crate::schema::pds::runtime_flag)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct RuntimeFlag {
pub name: String,
pub value: String,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(primary_key(did))]
#[diesel(table_name = crate::schema::pds::user_account)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct UserAccount {
pub did: String,
pub email: String,
pub password: String,
#[serde(rename = "recoveryKey", skip_serializing_if = "Option::is_none")]
pub recovery_key: Option<String>,
#[serde(rename = "createdAt")]
pub created_at: String,
#[serde(rename = "invitesDisabled")]
pub invites_disabled: i16,
#[serde(rename = "inviteNote", skip_serializing_if = "Option::is_none")]
pub invite_note: Option<String>,
#[serde(rename = "emailConfirmedAt", skip_serializing_if = "Option::is_none")]
pub email_confirmed_at: Option<String>,
}
#[derive(Queryable, Identifiable, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
#[diesel(table_name = crate::schema::pds::user_pref)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct UserPref {
pub id: i64,
pub did: String,
pub name: String,
#[serde(rename = "valueJson")]
pub value_json: String,
}
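The models now derive Selectable and pin the camelCase Postgres columns with #[diesel(column_name = ...)], so queries can use as_select() to keep the column list in sync with the struct. A minimal usage sketch, assuming a &mut PgConnection; the lookup function itself is hypothetical, though crate::models and crate::schema::pds::account are the paths used in this diff:

use diesel::prelude::*;

use crate::models::Account;
use crate::schema::pds::account::dsl as account_dsl;

// Hypothetical lookup showing what the Selectable derive buys.
fn account_by_did(conn: &mut PgConnection, did: &str) -> QueryResult<Option<Account>> {
    account_dsl::account
        .filter(account_dsl::did.eq(did))
        .select(Account::as_select())
        .first(conn)
        .optional()
}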

View File

@ -1,6 +1,26 @@
// @generated automatically by Diesel CLI.
pub mod pds {
diesel::table! {
pds.account (did) {
did -> Varchar,
email -> Varchar,
recoveryKey -> Nullable<Varchar>,
password -> Varchar,
createdAt -> Varchar,
invitesDisabled -> Int2,
emailConfirmedAt -> Nullable<Varchar>,
}
}
diesel::table! {
pds.account_pref (id) {
id -> Int4,
name -> Varchar,
valueJson -> Nullable<Text>,
}
}
diesel::table! {
pds.actor (did) {
did -> Varchar,
@ -10,14 +30,6 @@ pub mod pds {
}
}
diesel::table! {
pds.app_migration (id) {
id -> Varchar,
success -> Int2,
completedAt -> Nullable<Varchar>,
}
}
diesel::table! {
pds.app_password (did, name) {
did -> Varchar,
@ -31,14 +43,12 @@ pub mod pds {
pds.backlink (uri, path) {
uri -> Varchar,
path -> Varchar,
linkToUri -> Nullable<Varchar>,
linkToDid -> Nullable<Varchar>,
linkTo -> Varchar,
}
}
diesel::table! {
pds.blob (creator, cid) {
creator -> Varchar,
pds.blob (cid) {
cid -> Varchar,
mimeType -> Varchar,
size -> Int4,
@ -46,24 +56,18 @@ pub mod pds {
width -> Nullable<Int4>,
height -> Nullable<Int4>,
createdAt -> Varchar,
takedownRef -> Nullable<Varchar>,
}
}
diesel::table! {
pds.did_cache (did) {
pds.did_doc (did) {
did -> Varchar,
doc -> Text,
updatedAt -> Int8,
}
}
diesel::table! {
pds.did_handle (did) {
did -> Varchar,
handle -> Nullable<Varchar>,
}
}
diesel::table! {
pds.email_token (purpose, did) {
purpose -> Varchar,
@ -78,7 +82,7 @@ pub mod pds {
code -> Varchar,
availableUses -> Int4,
disabled -> Int2,
forUser -> Varchar,
forAccount -> Varchar,
createdBy -> Varchar,
createdAt -> Varchar,
}
@ -92,79 +96,22 @@ pub mod pds {
}
}
diesel::table! {
pds.ipld_block (creator, cid) {
creator -> Varchar,
cid -> Varchar,
size -> Int4,
content -> Bytea,
repoRev -> Nullable<Varchar>,
}
}
diesel::table! {
pds.moderation_action (id) {
id -> Int4,
action -> Varchar,
subjectType -> Varchar,
subjectDid -> Varchar,
subjectUri -> Nullable<Varchar>,
subjectCid -> Nullable<Varchar>,
reason -> Text,
createdAt -> Varchar,
createdBy -> Varchar,
reversedAt -> Nullable<Varchar>,
reversedBy -> Nullable<Varchar>,
reversedReason -> Nullable<Text>,
createLabelVals -> Nullable<Varchar>,
negateLabelVals -> Nullable<Varchar>,
durationInHours -> Nullable<Int4>,
expiresAt -> Nullable<Varchar>,
}
}
diesel::table! {
pds.moderation_action_subject_blob (actionId, cid, recordUri) {
id -> Int4,
actionId -> Int4,
cid -> Varchar,
recordUri -> Varchar,
}
}
diesel::table! {
pds.moderation_report (id) {
id -> Int4,
subjectType -> Varchar,
subjectDid -> Varchar,
subjectUri -> Nullable<Varchar>,
subjectCid -> Nullable<Varchar>,
reasonType -> Varchar,
reason -> Nullable<Text>,
reportedByDid -> Varchar,
createdAt -> Varchar,
}
}
diesel::table! {
pds.moderation_report_resolution (reportId, actionId) {
reportId -> Int4,
actionId -> Int4,
createdAt -> Varchar,
createdBy -> Varchar,
}
}
diesel::table! {
pds.record (uri) {
uri -> Varchar,
cid -> Varchar,
did -> Varchar,
collection -> Varchar,
rkey -> Varchar,
repoRev -> Nullable<Varchar>,
indexedAt -> Varchar,
takedownRef -> Nullable<Varchar>,
repoRev -> Nullable<Varchar>,
}
}
diesel::table! {
pds.record_blob (blobCid, recordUri) {
blobCid -> Varchar,
recordUri -> Int4,
}
}
@ -179,29 +126,26 @@ pub mod pds {
}
diesel::table! {
pds.repo_blob (cid, recordUri) {
pds.repo_block (cid) {
cid -> Varchar,
recordUri -> Varchar,
did -> Varchar,
takedownRef -> Nullable<Varchar>,
repoRev -> Nullable<Varchar>,
repoRev -> Varchar,
size -> Int4,
content -> Bytea,
}
}
diesel::table! {
pds.repo_root (did) {
did -> Varchar,
root -> Varchar,
indexedAt -> Varchar,
takedownRef -> Nullable<Varchar>,
cid -> Varchar,
rev -> Nullable<Varchar>,
indexedAt -> Varchar,
}
}
diesel::table! {
pds.repo_seq (id) {
id -> Int8,
seq -> Nullable<Int8>,
pds.repo_seq (seq) {
seq -> Int8,
did -> Varchar,
eventType -> Varchar,
event -> Bytea,
@ -210,62 +154,22 @@ pub mod pds {
}
}
diesel::table! {
pds.runtime_flag (name) {
name -> Varchar,
value -> Varchar,
}
}
diesel::table! {
pds.user_account (did) {
did -> Varchar,
email -> Varchar,
recoveryKey -> Nullable<Varchar>,
password -> Varchar,
createdAt -> Varchar,
invitesDisabled -> Int2,
inviteNote -> Nullable<Varchar>,
emailConfirmedAt -> Nullable<Varchar>,
}
}
diesel::table! {
pds.user_pref (id) {
id -> Int8,
did -> Varchar,
name -> Varchar,
valueJson -> Text,
}
}
diesel::joinable!(moderation_action_subject_blob -> moderation_action (actionId));
diesel::joinable!(moderation_report_resolution -> moderation_action (actionId));
diesel::joinable!(moderation_report_resolution -> moderation_report (reportId));
diesel::allow_tables_to_appear_in_same_query!(
account,
account_pref,
actor,
app_migration,
app_password,
backlink,
blob,
did_cache,
did_handle,
did_doc,
email_token,
invite_code,
invite_code_use,
ipld_block,
moderation_action,
moderation_action_subject_blob,
moderation_report,
moderation_report_resolution,
record,
record_blob,
refresh_token,
repo_blob,
repo_block,
repo_root,
repo_seq,
runtime_flag,
user_account,
user_pref,
);
}