Browse Source

Initial commit

master
Jared Bell 1 month ago
commit
13ba3924e3
58 changed files with 5184 additions and 0 deletions
  1. +2
    -0
      .env
  2. +1
    -0
      .gitignore
  3. +8
    -0
      .idea/.gitignore
  4. +12
    -0
      .idea/dataSources.xml
  5. +8
    -0
      .idea/modules.xml
  6. +11
    -0
      .idea/puffpastry-backend.iml
  7. +9
    -0
      .idea/sqldialects.xml
  8. +6
    -0
      .idea/vcs.xml
  9. +2785
    -0
      Cargo.lock
  10. +21
    -0
      Cargo.toml
  11. +23
    -0
      README_OPENAPI.md
  12. +3
    -0
      build.rs
  13. +9
    -0
      diesel.toml
  14. +6
    -0
      migrations/00000000000000_diesel_initial_setup/down.sql
  15. +36
    -0
      migrations/00000000000000_diesel_initial_setup/up.sql
  16. +1
    -0
      migrations/2025-08-04-003151_create_proposals/down.sql
  17. +11
    -0
      migrations/2025-08-04-003151_create_proposals/up.sql
  18. +2
    -0
      migrations/2025-08-08-035440_create_amendments/down.sql
  19. +20
    -0
      migrations/2025-08-08-035440_create_amendments/up.sql
  20. +1
    -0
      migrations/2025-08-13-202500_create_users/down.sql
  21. +17
    -0
      migrations/2025-08-13-202500_create_users/up.sql
  22. +1
    -0
      migrations/2025-08-13-232400_create_comments/down.sql
  23. +12
    -0
      migrations/2025-08-13-232400_create_comments/up.sql
  24. +7
    -0
      migrations/2025-08-20-200000_create_visits/down.sql
  25. +27
    -0
      migrations/2025-08-20-200000_create_visits/up.sql
  26. +1
    -0
      migrations/2025-08-25-235310_add_full_name_field/down.sql
  27. +1
    -0
      migrations/2025-08-25-235310_add_full_name_field/up.sql
  28. +26
    -0
      src/db/db.rs
  29. +1
    -0
      src/db/mod.rs
  30. +82
    -0
      src/ipfs/amendment.rs
  31. +106
    -0
      src/ipfs/comment.rs
  32. +7
    -0
      src/ipfs/ipfs.rs
  33. +4
    -0
      src/ipfs/mod.rs
  34. +81
    -0
      src/ipfs/proposal.rs
  35. +63
    -0
      src/main.rs
  36. +49
    -0
      src/repositories/amendment.rs
  37. +76
    -0
      src/repositories/comment.rs
  38. +5
    -0
      src/repositories/mod.rs
  39. +48
    -0
      src/repositories/proposal.rs
  40. +107
    -0
      src/repositories/user.rs
  41. +94
    -0
      src/repositories/visit.rs
  42. +102
    -0
      src/routes/amendment.rs
  43. +200
    -0
      src/routes/auth.rs
  44. +18
    -0
      src/routes/comment.rs
  45. +5
    -0
      src/routes/mod.rs
  46. +251
    -0
      src/routes/openapi.rs
  47. +157
    -0
      src/routes/proposal.rs
  48. +100
    -0
      src/schema.rs
  49. +117
    -0
      src/types/amendment.rs
  50. +50
    -0
      src/types/comment.rs
  51. +3
    -0
      src/types/ipfs.rs
  52. +4
    -0
      src/types/mod.rs
  53. +127
    -0
      src/types/proposal.rs
  54. +85
    -0
      src/utils/auth.rs
  55. +7
    -0
      src/utils/content.rs
  56. +16
    -0
      src/utils/env.rs
  57. +148
    -0
      src/utils/ipfs.rs
  58. +4
    -0
      src/utils/mod.rs

+ 2
- 0
.env View File

@@ -0,0 +1,2 @@
# WARNING(review): a live database password and JWT secret are committed to
# version control here — rotate both and keep real values out of the repo.
DATABASE_URL=postgres://postgres:MxS2p37ViXtXikeb@localhost/puffpastry_jv
JWT_SECRET="Look for the union label, you crazy diamond"

+ 1
- 0
.gitignore View File

@@ -0,0 +1 @@
/target

+ 8
- 0
.idea/.gitignore View File

@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

+ 12
- 0
.idea/dataSources.xml View File

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="postgres@localhost" uuid="68351c5b-378b-43b9-b884-552cec2dc79f">
<driver-ref>postgresql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.postgresql.Driver</jdbc-driver>
<jdbc-url>jdbc:postgresql://localhost:5432/postgres</jdbc-url>
<working-dir>$ProjectFileDir$</working-dir>
</data-source>
</component>
</project>

+ 8
- 0
.idea/modules.xml View File

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/puffpastry-backend.iml" filepath="$PROJECT_DIR$/.idea/puffpastry-backend.iml" />
</modules>
</component>
</project>

+ 11
- 0
.idea/puffpastry-backend.iml View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="EMPTY_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

+ 9
- 0
.idea/sqldialects.xml View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$/migrations/2025-08-04-003151_create_proposals/up.sql" dialect="PostgreSQL" />
<file url="file://$PROJECT_DIR$/migrations/2025-08-08-035440_create_amendments/up.sql" dialect="PostgreSQL" />
<file url="file://$PROJECT_DIR$/migrations/2025-08-13-232400_create_comments/up.sql" dialect="PostgreSQL" />
<file url="PROJECT" dialect="PostgreSQL" />
</component>
</project>

+ 6
- 0
.idea/vcs.xml View File

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

+ 2785
- 0
Cargo.lock
File diff suppressed because it is too large
View File


+ 21
- 0
Cargo.toml View File

@@ -0,0 +1,21 @@
[package]
name = "puffpastry-backend"
version = "0.1.0"
edition = "2024"

[dependencies]
actix-web = "4.11.0"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.142"
ipfs-api-backend-actix = "0.7.0"
uuid = { version = "1.17.0", features = ["v4"] }
diesel = { version = "2.2.12", features = ["postgres", "chrono", "r2d2"] }
dotenvy = "0.15.7"
chrono = { version = "0.4.41", features = ["serde"] }
futures = "0.3.31"
diesel-derive-enum = { version = "3.0.0-beta.1", features = ["postgres"] }
argon2 = "0.5"
base64 = "0.22"
ed25519-dalek = { version = "2.1", features = ["rand_core"] }
stellar-strkey = "0.0.13"
jsonwebtoken = "9"

+ 23
- 0
README_OPENAPI.md View File

@@ -0,0 +1,23 @@
PuffPastry Backend OpenAPI and Frontend SDK Generation

This backend exposes an OpenAPI 3.0 document at:

GET http://localhost:7300/openapi.json

You can generate a TypeScript client for your frontend using openapi-typescript-codegen:

1) Install the generator (once):
npm install -g openapi-typescript-codegen

2) Run code generation (adjust output path to your frontend project):
openapi --input http://localhost:7300/openapi.json --output ../frontend/src/api --client axios

If you prefer, you can output a local file first:

curl -s http://localhost:7300/openapi.json -o openapi.json
openapi --input openapi.json --output ../frontend/src/api --client axios

Notes
- The current OpenAPI spec is minimal and focuses on request shapes and endpoints. It’s sufficient for client generation.
- We can later switch to automatic spec generation with Apistos annotations without changing the endpoint path.
- If you add new endpoints, update the spec in src/routes/openapi.rs accordingly, or let’s enhance it with Apistos once we decide on the annotation strategy.

+ 3
- 0
build.rs View File

@@ -0,0 +1,3 @@
fn main() {
println!("cargo:rustc-link-search=native=/Library/PostgreSQL/17/lib");
}

+ 9
- 0
diesel.toml View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]

[migrations_directory]
dir = "/Users/jaredbell/RustroverProjects/puffpastry-backend/migrations"

+ 6
- 0
migrations/00000000000000_diesel_initial_setup/down.sql View File

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

+ 36
- 0
migrations/00000000000000_diesel_initial_setup/up.sql View File

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

+ 1
- 0
migrations/2025-08-04-003151_create_proposals/down.sql View File

@@ -0,0 +1 @@
drop table proposals;

+ 11
- 0
migrations/2025-08-04-003151_create_proposals/up.sql View File

@@ -0,0 +1,11 @@
-- Proposals: one row per stored proposal version; `cid` is the IPFS content
-- hash of the uploaded proposal JSON.
create table proposals (
    id serial primary key,
    name text not null,
    cid text not null,
    summary text,
    creator text not null,
    is_current boolean not null, -- true for latest/current version
    previous_cid text, -- the CID of the previous version, if any
    -- NOTE(review): later migrations use `timestamptz not null default now()`;
    -- these nullable `timestamp` columns are inconsistent with that convention.
    created_at timestamp default now(),
    updated_at timestamp
)

+ 2
- 0
migrations/2025-08-08-035440_create_amendments/down.sql View File

@@ -0,0 +1,2 @@
drop table amendments;
drop type amendment_status;

+ 20
- 0
migrations/2025-08-08-035440_create_amendments/up.sql View File

@@ -0,0 +1,20 @@
-- Lifecycle states an amendment can be in.
create type amendment_status as enum (
    'proposed',
    'approved',
    'withdrawn',
    'rejected'
);

-- Amendments are versioned like proposals: `is_current` flags the latest
-- version and `previous_cid` links back to the prior IPFS snapshot.
create table amendments (
    id serial primary key,
    name text not null,
    cid text not null, -- IPFS CID of the amendment JSON
    summary text,
    status amendment_status not null default 'proposed',
    creator text not null,
    is_current boolean not null default true,
    previous_cid text,
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now(),
    proposal_id integer not null references proposals(id) -- parent proposal
)

+ 1
- 0
migrations/2025-08-13-202500_create_users/down.sql View File

@@ -0,0 +1 @@
drop table users;

+ 17
- 0
migrations/2025-08-13-202500_create_users/up.sql View File

@@ -0,0 +1,17 @@
-- Users table to hold account information
-- Postgres dialect

create table users (
id serial primary key,
username text not null unique,
password_hash text not null,
stellar_address text not null unique,
email text unique,
is_active boolean not null default true,
roles text[] not null default '{}',
last_login_at timestamptz,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);

-- Optional helpful indexes (unique already creates indexes, but adding a lower() index for case-insensitive lookups could be considered later)

+ 1
- 0
migrations/2025-08-13-232400_create_comments/down.sql View File

@@ -0,0 +1 @@
drop table comments;

+ 12
- 0
migrations/2025-08-13-232400_create_comments/up.sql View File

@@ -0,0 +1,12 @@
-- Comments schema
-- Postgres dialect

create table comments (
id serial primary key,
cid text not null,
is_current bool not null default true,
previous_cid text,
proposal_id integer not null references proposals(id) on delete cascade,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);

+ 7
- 0
migrations/2025-08-20-200000_create_visits/down.sql View File

@@ -0,0 +1,7 @@
drop index if exists idx_visits_method;
drop index if exists idx_visits_path;
drop index if exists idx_visits_proposal_id;
drop index if exists idx_visits_user_id;
drop index if exists idx_visits_visited_at;

drop table if exists visits;

+ 27
- 0
migrations/2025-08-20-200000_create_visits/up.sql View File

@@ -0,0 +1,27 @@
-- Visits tracking table: one row per tracked request/page view.
-- Postgres dialect

create table visits (
    id serial primary key,
    user_id integer references users(id) on delete set null, -- kept, but nulled, if the user is deleted
    -- NOTE(review): unlike user_id, proposal_id has no foreign key to
    -- proposals(id) — confirm whether that is intentional.
    proposal_id integer not null,
    path text not null,
    method text not null,
    query_string text,
    status_code integer,
    ip_address text,
    user_agent text,
    referrer text,
    session_id text,
    request_id text,
    visited_at timestamptz not null default now(),
    created_at timestamptz not null default now(),
    updated_at timestamptz not null default now()
);

-- Helpful indexes for analytics and lookups
create index idx_visits_visited_at on visits(visited_at);
create index idx_visits_user_id on visits(user_id);
create index idx_visits_proposal_id on visits(proposal_id);
create index idx_visits_path on visits(path);
create index idx_visits_method on visits(method);

+ 1
- 0
migrations/2025-08-25-235310_add_full_name_field/down.sql View File

@@ -0,0 +1 @@
alter table users drop column full_name;

+ 1
- 0
migrations/2025-08-25-235310_add_full_name_field/up.sql View File

@@ -0,0 +1 @@
alter table users add column full_name text;

+ 26
- 0
src/db/db.rs View File

@@ -0,0 +1,26 @@
use diesel::r2d2::{self, ConnectionManager};
use diesel::PgConnection;
use std::env;
use dotenvy::dotenv;

pub type DbPool = r2d2::Pool<ConnectionManager<PgConnection>>;
pub type DbConn = r2d2::PooledConnection<ConnectionManager<PgConnection>>;

/// Builds a fresh r2d2 connection pool from the `DATABASE_URL` environment
/// variable, loading `.env` first (a missing `.env` is tolerated).
///
/// Returns an error if `DATABASE_URL` is unset or the pool cannot be built.
pub fn establish_connection() -> Result<DbPool, Box<dyn std::error::Error>> {
    // Best-effort load of .env; variables already set in the process win.
    dotenv().ok();

    let url = match env::var("DATABASE_URL") {
        Ok(v) => v,
        Err(_) => return Err("DATABASE_URL must be set".into()),
    };

    r2d2::Pool::builder()
        .build(ConnectionManager::<PgConnection>::new(url))
        .map_err(|e| -> Box<dyn std::error::Error> {
            format!("Failed to create pool: {}", e).into()
        })
}

pub fn get_connection() -> Result<DbConn, Box<dyn std::error::Error>> {
let pool = establish_connection()?;
pool.get().map_err(|e| -> Box<dyn std::error::Error> { Box::new(e) })
}

+ 1
- 0
src/db/mod.rs View File

@@ -0,0 +1 @@
pub(crate) mod db;

+ 82
- 0
src/ipfs/amendment.rs View File

@@ -0,0 +1,82 @@
use crate::db::db::get_connection;
use crate::ipfs::ipfs::IpfsService;
use crate::schema::amendments;
use crate::types::amendment::{Amendment, AmendmentError, AmendmentFile};
use crate::types::ipfs::IpfsResult;
use crate::utils::content::extract_summary;
use crate::utils::ipfs::{read_json_via_cat, upload_json_and_get_hash, DEFAULT_MAX_JSON_SIZE};
use diesel::ExpressionMethods;
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use ipfs_api_backend_actix::IpfsClient;

// MFS directory and file extension used for amendment uploads.
const STORAGE_DIR: &str = "/puffpastry/amendments";
const FILE_EXTENSION: &str = "json";

/// IPFS-backed persistence for amendments: uploads the amendment JSON to the
/// node under `STORAGE_DIR` and mirrors version metadata into the
/// `amendments` table.
pub struct AmendmentService {
    client: IpfsClient,
}

impl IpfsService<AmendmentFile> for AmendmentService {
    type Err = AmendmentError;

    /// Stores the amendment and returns the content hash (CID) of the upload.
    async fn save(&mut self, item: AmendmentFile) -> IpfsResult<String, Self::Err> {
        self.store_amendment_to_ipfs(item).await
    }

    /// Fetches an amendment file back from IPFS by its CID.
    async fn read(&mut self, hash: String) -> IpfsResult<AmendmentFile, Self::Err> {
        self.read_amendment_file(&hash).await
    }
}

impl AmendmentService {
    pub fn new(client: IpfsClient) -> Self {
        Self { client }
    }

    /// Uploads the amendment JSON to IPFS, inserts a DB row flagged current,
    /// then clears `is_current` on any row whose `previous_cid` equals the
    /// freshly minted hash.
    ///
    /// NOTE(review): at insert time no existing row can reference the
    /// brand-new hash as its `previous_cid`, so the demotion update normally
    /// matches 0 rows. Retiring the superseded version would be expected to
    /// match `cid == <old previous_cid>` instead — confirm intended
    /// versioning semantics (same pattern exists in ProposalService).
    ///
    /// NOTE(review): the insert and the demotion use two separately acquired
    /// connections and are not wrapped in a transaction, so a failure in
    /// between can leave more than one row flagged current.
    async fn store_amendment_to_ipfs(&self, amendment: AmendmentFile) -> IpfsResult<String, AmendmentError> {
        // Build the DB metadata record from the raw file contents.
        let amendment_record = Amendment::new(
            amendment.name.clone(),
            Some(extract_summary(&amendment.content)),
            amendment.creator.clone(),
            amendment.proposal_id,
        );

        let hash = self.upload_to_ipfs(&amendment).await?;
        self.save_to_database(amendment_record.with_cid(hash.clone()).mark_as_current()).await?;

        {
            use crate::schema::amendments::dsl::{amendments as amendments_table, is_current as is_current_col, previous_cid as previous_cid_col};
            let mut conn = get_connection()
                .map_err(|e| AmendmentError::DatabaseError(e))?;

            // Ignore the count result; if no rows match, that's fine
            let _ = diesel::update(amendments_table.filter(previous_cid_col.eq(hash.clone())))
                .set(is_current_col.eq(false))
                .execute(&mut conn)
                .map_err(|e| AmendmentError::DatabaseError(Box::new(e)))?;
        }

        Ok(hash)
    }

    /// Delegates the upload to the shared JSON helper; returns the file CID.
    async fn upload_to_ipfs(&self, amendment: &AmendmentFile) -> IpfsResult<String, AmendmentError> {
        upload_json_and_get_hash::<AmendmentFile, AmendmentError>(&self.client, STORAGE_DIR, FILE_EXTENSION, amendment).await
    }

    /// Inserts the metadata row into the `amendments` table.
    async fn save_to_database(&self, amendment: Amendment) -> IpfsResult<(), AmendmentError> {
        let mut conn = get_connection()
            .map_err(|e| AmendmentError::DatabaseError(e))?;

        diesel::insert_into(amendments::table)
            .values(&amendment)
            .execute(&mut conn)
            .map_err(|e| AmendmentError::DatabaseError(Box::new(e)))?;

        Ok(())
    }

    /// Reads and deserializes an amendment JSON file by CID, capped at the
    /// shared default maximum JSON size.
    async fn read_amendment_file(&self, hash: &str) -> IpfsResult<AmendmentFile, AmendmentError> {
        read_json_via_cat::<AmendmentFile, AmendmentError>(&self.client, hash, DEFAULT_MAX_JSON_SIZE).await
    }
}

+ 106
- 0
src/ipfs/comment.rs View File

@@ -0,0 +1,106 @@
use crate::ipfs::ipfs::IpfsService;
use crate::types::comment::{CommentError, CommentMetadata};
use crate::types::ipfs::IpfsResult;
use crate::utils::ipfs::{
create_file_path,
create_storage_directory,
read_json_via_cat,
retrieve_content_hash,
save_json_file,
DEFAULT_MAX_JSON_SIZE,
list_directory_file_hashes,
};
use ipfs_api_backend_actix::IpfsClient;

// MFS root directory and file extension for stored comment files; each
// proposal gets its own subdirectory beneath this root.
const STORAGE_DIR: &str = "/puffpastry/comments";
const FILE_EXTENSION: &str = "json";

/// IPFS-backed storage for comment threads, organized as one MFS
/// subdirectory per proposal under `STORAGE_DIR`.
pub struct CommentService {
    client: IpfsClient,
}

// Implement per-proposal subdirectory saving. The input is (proposal_cid, comments_batch)
impl IpfsService<(String, Vec<CommentMetadata>)> for CommentService {
    type Err = CommentError;

    /// Saves a batch of comments, one JSON file each, into the proposal's
    /// subdirectory, returning the CID of the last file written.
    ///
    /// Errors on an empty batch, and fails fast on the first comment that
    /// cannot be stored — files written earlier in the batch are NOT rolled
    /// back.
    async fn save(&mut self, item: (String, Vec<CommentMetadata>)) -> IpfsResult<String, Self::Err> {
        let (proposal_cid, comments) = item;
        // Allow batch save within the proposal's subdirectory.
        if comments.is_empty() {
            return Err(CommentError::from(std::io::Error::new(
                std::io::ErrorKind::InvalidInput,
                "Failed to store comment to IPFS: empty comments batch",
            )));
        }
        let mut last_cid: Option<String> = None;
        for comment in comments {
            let res = self
                .store_comment_in_proposal_dir_and_publish(&proposal_cid, comment)
                .await;
            match res {
                Ok(cid) => last_cid = Some(cid),
                Err(e) => return Err(e),
            }
        }
        // The empty-batch guard above means last_cid is Some here; the None
        // arm is defensive.
        match last_cid {
            Some(cid) => Ok(cid),
            None => Err(CommentError::from(std::io::Error::new(
                std::io::ErrorKind::Other,
                "Failed to store comment to IPFS",
            ))),
        }
    }

    /// Reads every comment file under the given directory CID.
    async fn read(&mut self, hash: String) -> IpfsResult<(String, Vec<CommentMetadata>), Self::Err> {
        // For reading, the caller should pass a directory hash (CID). We return only the comments vector here.
        // Since IpfsService requires returning the same T, include an empty proposal id (unknown in read-by-hash).
        // Alternatively, callers should not use the proposal id from this return value.
        let comments = self.read_all_comments_in_dir(&hash).await?;
        Ok((String::new(), comments))
    }
}

impl CommentService {
    pub fn new(client: IpfsClient) -> Self {
        Self { client }
    }

    // Writes a single comment JSON file into the MFS comments subdirectory for the proposal,
    // then publishes the subdirectory snapshot (by retrieving the directory CID). Returns the comment file CID.
    async fn store_comment_in_proposal_dir_and_publish(
        &self,
        proposal_cid: &str,
        comment: CommentMetadata,
    ) -> IpfsResult<String, CommentError> {
        // Ensure the per-proposal storage subdirectory exists
        let proposal_dir = format!("{}/{}", STORAGE_DIR, proposal_cid);
        create_storage_directory::<CommentError>(&self.client, &proposal_dir).await?;

        // Create a unique file path within the proposal subdirectory and save the JSON content
        let file_path = create_file_path(&proposal_dir, FILE_EXTENSION);
        save_json_file::<CommentMetadata, CommentError>(&self.client, &file_path, &comment).await?;

        // Retrieve the file's CID to return to the caller
        let file_cid = retrieve_content_hash::<CommentError>(&self.client, &file_path).await?;

        // Publish a new snapshot of the proposal's comments subdirectory by retrieving its CID.
        // NOTE(review): the directory CID is computed but discarded — confirm
        // whether callers are expected to obtain it some other way.
        let _dir_cid = retrieve_content_hash::<CommentError>(&self.client, &proposal_dir).await?;

        Ok(file_cid)
    }

    /// Reads and deserializes a single comment file by CID (size-capped).
    async fn read_comment_file(&self, hash: &str) -> IpfsResult<CommentMetadata, CommentError> {
        read_json_via_cat::<CommentMetadata, CommentError>(&self.client, hash, DEFAULT_MAX_JSON_SIZE).await
    }

    // Read all comment files within a directory identified by the given hash (directory CID)
    async fn read_all_comments_in_dir(&self, dir_hash: &str) -> IpfsResult<Vec<CommentMetadata>, CommentError> {
        let file_hashes = list_directory_file_hashes::<CommentError>(&self.client, dir_hash).await?;
        let mut comments: Vec<CommentMetadata> = Vec::with_capacity(file_hashes.len());
        for fh in file_hashes {
            let comment = self.read_comment_file(&fh).await?;
            comments.push(comment);
        }
        Ok(comments)
    }
}

+ 7
- 0
src/ipfs/ipfs.rs View File

@@ -0,0 +1,7 @@
use crate::types::ipfs::IpfsResult;

/// Minimal persistence abstraction over IPFS for one payload type `T`:
/// `save` stores `item` and yields the resulting content hash (CID);
/// `read` resolves a CID back into a `T`. Failure types are
/// implementation-defined via the associated `Err`.
///
/// NOTE(review): `async fn` in traits produces futures with no `Send`
/// bound; confirm that is acceptable for how these services are driven
/// by the actix runtime.
pub trait IpfsService<T> {
    type Err;
    async fn save(&mut self, item: T) -> IpfsResult<String, Self::Err>;
    async fn read(&mut self, hash: String) -> IpfsResult<T, Self::Err>;
}

+ 4
- 0
src/ipfs/mod.rs View File

@@ -0,0 +1,4 @@
pub(crate) mod ipfs;
pub(crate) mod proposal;
pub(crate) mod amendment;
pub(crate) mod comment;

+ 81
- 0
src/ipfs/proposal.rs View File

@@ -0,0 +1,81 @@
use crate::db::db::get_connection;
use crate::ipfs::ipfs::IpfsService;
use crate::schema::proposals;
use crate::types::proposal::{Proposal, ProposalError, ProposalFile};
use crate::utils::content::extract_summary;
use crate::utils::ipfs::{upload_json_and_get_hash, read_json_via_cat as util_read_json_via_cat, DEFAULT_MAX_JSON_SIZE};
use diesel::{RunQueryDsl, QueryDsl, ExpressionMethods};
use ipfs_api_backend_actix::IpfsClient;
use crate::types::ipfs::IpfsResult;

// MFS directory and file extension used for proposal uploads.
const STORAGE_DIR: &str = "/puffpastry/proposals";
const FILE_EXTENSION: &str = "json";

/// IPFS-backed persistence for proposals: uploads the proposal JSON under
/// `STORAGE_DIR` and mirrors version metadata into the `proposals` table.
pub struct ProposalService {
    client: IpfsClient,
}

impl IpfsService<ProposalFile> for ProposalService {
    type Err = ProposalError;

    /// Stores the proposal and returns the content hash (CID) of the upload.
    async fn save(&mut self, proposal: ProposalFile) -> IpfsResult<String, Self::Err> {
        self.store_proposal_to_ipfs(proposal).await
    }

    /// Fetches a proposal file back from IPFS by its CID.
    async fn read(&mut self, hash: String) -> IpfsResult<ProposalFile, Self::Err> {
        self.read_proposal_file(hash).await
    }
}

impl ProposalService {
    pub fn new(client: IpfsClient) -> Self {
        Self { client }
    }

    /// Uploads the proposal JSON to IPFS, inserts a DB row flagged current,
    /// then clears `is_current` on any row whose `previous_cid` equals the
    /// freshly minted hash.
    ///
    /// NOTE(review): at insert time no existing row can reference the
    /// brand-new hash as its `previous_cid`, so the demotion update normally
    /// matches 0 rows. Retiring the superseded version would be expected to
    /// match `cid == <old previous_cid>` instead — confirm intended
    /// versioning semantics (same pattern exists in AmendmentService).
    ///
    /// NOTE(review): the insert and the demotion use two separately acquired
    /// connections and are not wrapped in a transaction.
    async fn store_proposal_to_ipfs(&self, proposal: ProposalFile) -> IpfsResult<String, ProposalError> {
        // Build the DB metadata record from the raw file contents.
        let proposal_record = Proposal::new(
            proposal.name.clone(),
            Some(extract_summary(&proposal.content)),
            proposal.creator.clone(),
        );

        let hash = self.upload_to_ipfs(&proposal).await?;
        self.save_to_database(proposal_record.with_cid(hash.clone()).mark_as_current()).await?;

        // After saving the new proposal, set is_current = false for any proposal
        // that has previous_cid equal to this new hash
        {
            use crate::schema::proposals::dsl::{proposals as proposals_table, previous_cid as previous_cid_col, is_current as is_current_col};
            let mut conn = get_connection()
                .map_err(|e| ProposalError::DatabaseError(e))?;

            // Ignore the count result; if no rows match, that's fine
            let _ = diesel::update(proposals_table.filter(previous_cid_col.eq(hash.clone())))
                .set(is_current_col.eq(false))
                .execute(&mut conn)
                .map_err(|e| ProposalError::DatabaseError(Box::new(e)))?;
        }
        Ok(hash)
    }

    /// Delegates the upload to the shared JSON helper; returns the file CID.
    async fn upload_to_ipfs(&self, data: &ProposalFile) -> IpfsResult<String, ProposalError> {
        upload_json_and_get_hash::<ProposalFile, ProposalError>(&self.client, STORAGE_DIR, FILE_EXTENSION, data).await
    }

    /// Inserts the metadata row into the `proposals` table.
    async fn save_to_database(&self, proposal: Proposal) -> IpfsResult<(), ProposalError> {
        let mut conn = get_connection()
            .map_err(|e| ProposalError::DatabaseError(e))?;

        diesel::insert_into(proposals::table)
            .values(&proposal)
            .execute(&mut conn)
            .map_err(|e| ProposalError::DatabaseError(Box::new(e)))?;

        Ok(())
    }

    /// Reads and deserializes a proposal JSON file by CID, capped at the
    /// shared default maximum JSON size.
    async fn read_proposal_file(&self, hash: String) -> IpfsResult<ProposalFile, ProposalError> {
        util_read_json_via_cat::<ProposalFile, ProposalError>(&self.client, &hash, DEFAULT_MAX_JSON_SIZE).await
    }
}

+ 63
- 0
src/main.rs View File

@@ -0,0 +1,63 @@
extern crate core;

mod routes;
mod schema;
mod db;
mod ipfs;
mod types;
mod repositories;
mod utils;

use crate::routes::proposal::{add_proposal, get_proposal, list_proposals, visit_proposal};
use actix_web::{web, App, HttpServer};
use crate::routes::amendment::{add_amendment, get_amendment, list_amendments};
use crate::routes::auth::{register, login, login_freighter, get_nonce};
use crate::routes::openapi::get_openapi;
use crate::utils::env::load_env;

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Load environment variables from .env file (if present) before anything
    // reads configuration from the process environment.
    load_env();

    // Generalization: the bind address and port are now overridable via the
    // HOST / PORT environment variables. The defaults reproduce the previous
    // hard-coded values, so existing setups are unaffected. A malformed PORT
    // silently falls back to the default rather than aborting startup.
    let ip = std::env::var("HOST").unwrap_or_else(|_| "127.0.0.1".to_string());
    let port: u16 = std::env::var("PORT")
        .ok()
        .and_then(|p| p.parse().ok())
        .unwrap_or(7300);

    println!("Starting server on http://{}:{}", ip, port);
    HttpServer::new(|| {
        App::new()
            // OpenAPI document lives outside the versioned API scope.
            .service(get_openapi)
            .service(
                web::scope("/api/v1")
                    .service(
                        web::scope("/proposals")
                            .service(list_proposals)
                    )
                    .service(
                        web::scope("/proposal")
                            .service(add_proposal)
                            .service(get_proposal)
                            .service(visit_proposal)
                    )
                    .service(
                        web::scope("/amendment")
                            .service(add_amendment)
                            .service(get_amendment)
                    )
                    .service(
                        web::scope("/amendments")
                            .service(list_amendments)
                    )
                    .service(
                        web::scope("/auth")
                            .service(register)
                            .service(login)
                            .service(login_freighter)
                            .service(get_nonce)
                    )
            )
    })
    .bind((ip.as_str(), port))?
    .run()
    .await
}

+ 49
- 0
src/repositories/amendment.rs View File

@@ -0,0 +1,49 @@
use crate::db::db::establish_connection;
use crate::schema::amendments::dsl::amendments;
use crate::types::amendment::SelectableAmendment;
use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper};

/// Loads every amendment row belonging to `proposal_id`, ordered by
/// ascending id. Pool/connection failures are surfaced as generic diesel
/// `DatabaseError`s so the return type stays `diesel::result::Error`.
pub fn get_amendments_for_proposal(
    proposal_id: i32,
) -> Result<Vec<SelectableAmendment>, diesel::result::Error> {
    use crate::schema::amendments::dsl::{amendments, proposal_id as proposal_id_col};

    let pool = establish_connection().map_err(|_| {
        diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::Unknown,
            Box::new(String::from("Failed to establish database connection")),
        )
    })?;

    let mut conn = pool.get().map_err(|_| {
        diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::UnableToSendCommand,
            Box::new(String::from("Failed to get connection from pool")),
        )
    })?;

    let query = amendments
        .filter(proposal_id_col.eq(proposal_id))
        .order(crate::schema::amendments::id.asc())
        .select(SelectableAmendment::as_select());
    query.load(&mut conn)
}

pub fn get_cid_for_amendment(amendment_id: i32) -> Result<String, diesel::result::Error> {
let pool = establish_connection()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to establish database connection".to_string())
))?;

let mut conn = pool.get()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to get database connection from pool".to_string())
))?;

use crate::schema::amendments::dsl::{id as id_col, cid as cid_col};
amendments
.filter(id_col.eq(amendment_id))
.select(cid_col)
.get_result(&mut conn)
}

+ 76
- 0
src/repositories/comment.rs View File

@@ -0,0 +1,76 @@
use diesel::{RunQueryDsl, ExpressionMethods, QueryDsl, Connection, BoolExpressionMethods};
use crate::db::db::establish_connection;
use crate::schema::comments::dsl::*;

/// Records a new comment-thread snapshot CID for proposal `pid`.
///
/// Versioning is append-only. Instead of updating an existing record, we:
/// 1) mark the previous record (matched by its CID) as not current
///    (`is_current = false`), then
/// 2) insert a new record with `is_current = true` and `previous_cid` set to
///    the prior CID.
/// Both steps run inside a single transaction, so a failure leaves the
/// previous state intact.
pub fn store_new_thread_cid_for_proposal(
    pid: i32,
    new_cid_value: &str,
    previous_cid_value: Option<&str>,
) -> Result<(), diesel::result::Error> {
    // Pool/connection failures are surfaced as generic diesel DatabaseErrors.
    let pool = establish_connection()
        .map_err(|_| diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::Unknown,
            Box::new("Failed to establish database connection".to_string()),
        ))?;

    let mut conn = pool.get().map_err(|_| {
        diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::Unknown,
            Box::new("Failed to get database connection from pool".to_string()),
        )
    })?;

    conn.transaction(|conn| {
        if let Some(prev) = previous_cid_value {
            // Best-effort mark previous as not current; it's okay if 0 rows
            // were updated (e.g., first insert)
            let _ = diesel::update(comments.filter(cid.eq(prev)))
                .set(is_current.eq(false))
                .execute(conn)?;
        }

        diesel::insert_into(crate::schema::comments::table)
            .values((
                cid.eq(new_cid_value),
                proposal_id.eq(pid),
                is_current.eq(true),
                previous_cid.eq(previous_cid_value),
            ))
            .execute(conn)
            .map(|_| ())
    })
}

// Retrieve the latest/current thread CID for a proposal
pub fn latest_thread_cid_for_proposal(pid_value: i32) -> Result<String, diesel::result::Error> {
let pool = establish_connection()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to establish database connection".to_string()),
))?;

let mut conn = pool.get().map_err(|_| {
diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to get database connection from pool".to_string()),
)
})?;

// Prefer the record marked as current; fallback to most recent if none are marked
match comments
.filter(proposal_id.eq(pid_value).and(is_current.eq(true)))
.order(created_at.desc())
.select(cid)
.first::<String>(&mut conn)
{
Ok(current) => Ok(current),
Err(_) => comments
.filter(proposal_id.eq(pid_value))
.order(created_at.desc())
.select(cid)
.first::<String>(&mut conn),
}
}

+ 5
- 0
src/repositories/mod.rs View File

@@ -0,0 +1,5 @@
pub(crate) mod proposal;
pub(crate) mod amendment;
pub(crate) mod user;
pub(crate) mod comment;
pub(crate) mod visit;

+ 48
- 0
src/repositories/proposal.rs View File

@@ -0,0 +1,48 @@
use diesel::ExpressionMethods;
use diesel::{QueryDsl, RunQueryDsl};
use crate::db::db::establish_connection;
use crate::schema::proposals::dsl::*;
use crate::types::proposal::SelectableProposal;

pub fn paginate_proposals(
offset: i64,
limit: i64
) -> Result<Vec<SelectableProposal>, diesel::result::Error> {
let pool = establish_connection()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to establish database connection".to_string())
))?;
let mut conn = pool.get()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to get database connection from pool".to_string())
))?;

proposals
.filter(is_current.eq(true))
.offset(offset)
.limit(limit)
.order(created_at.desc())
.load::<SelectableProposal>(&mut conn)
}

pub fn get_cid_for_proposal(proposal_id: i32) -> Result<String, diesel::result::Error> {
let pool = establish_connection()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to establish database connection".to_string())
))?;

let mut conn = pool.get()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to get database connection from pool".to_string())
))?;

proposals.
filter(id.eq(proposal_id))
.select(cid)
.get_result(&mut conn)
}

+ 107
- 0
src/repositories/user.rs View File

@@ -0,0 +1,107 @@
use diesel::prelude::*;
use diesel::RunQueryDsl;
use diesel::pg::PgConnection;
use diesel::r2d2;
use serde::Serialize;

use crate::db::db::establish_connection;
use crate::schema::users;
use crate::schema::users::dsl::*;

/// Insertable payload for the `users` table; columns not listed here take
/// their database defaults.
#[derive(Insertable)]
#[diesel(table_name = users)]
pub struct NewUserInsert {
    pub username: String,
    pub password_hash: String,
    pub stellar_address: String,
    pub email: Option<String>,
}

/// Public view of a freshly registered user (no password hash), serialized
/// back to the client by the auth routes.
#[derive(Serialize)]
pub struct RegisteredUser {
    pub id: i32,
    pub username: String,
    pub stellar_address: String,
    pub email: Option<String>,
}

/// Row shape used when verifying credentials. `Queryable` maps by position,
/// so the field order here MUST match the `select` tuple in
/// `find_user_by_username`.
#[derive(Queryable)]
pub struct UserAuth {
    pub id: i32,
    pub username: String,
    pub full_name: Option<String>,
    pub password_hash: String,
    pub stellar_address: String,
    pub email: Option<String>,
    pub is_active: bool,
}

/// Password-free row shape for token issuance. Field order MUST match the
/// `select` tuples in the `get_user_for_auth_by_*` lookups (note: it differs
/// from `UserAuth` — `full_name` comes before `username` here).
#[derive(Queryable, Serialize)]
pub struct UserForAuth {
    pub id: i32,
    pub full_name: Option<String>,
    pub username: String,
    pub stellar_address: String,
    pub email: Option<String>,
    pub is_active: bool,
}

/// Check out a pooled Postgres connection, translating pool-level
/// failures into `diesel::result::Error` so repository functions expose
/// a single error type.
fn get_conn() -> Result<r2d2::PooledConnection<diesel::r2d2::ConnectionManager<PgConnection>>, diesel::result::Error> {
    // Shared builder for the wrapped error; r2d2 errors are not diesel errors.
    let db_err = |msg: &str| {
        diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::Unknown,
            Box::new(msg.to_string()),
        )
    };

    let pool = establish_connection()
        .map_err(|_| db_err("Failed to establish database connection"))?;
    pool.get()
        .map_err(|_| db_err("Failed to get database connection from pool"))
}

/// Insert a new user and return the publicly-safe registered view
/// (never includes the password hash).
pub(crate) fn create_user(
    new_user: NewUserInsert,
) -> Result<RegisteredUser, diesel::result::Error> {
    let mut conn = get_conn()?;

    // RETURNING gives us the generated id plus the echoed columns in one round trip.
    let (uid, uname, saddr, mail): (i32, String, String, Option<String>) =
        diesel::insert_into(users::table)
            .values(&new_user)
            .returning((id, username, stellar_address, email))
            .get_result(&mut conn)?;

    Ok(RegisteredUser {
        id: uid,
        username: uname,
        stellar_address: saddr,
        email: mail,
    })
}

/// Fetch the full auth record (including the password hash) for a
/// username; yields `Err(NotFound)` when no such user exists.
pub(crate) fn find_user_by_username(uname: &str) -> Result<UserAuth, diesel::result::Error> {
    let conn = &mut get_conn()?;

    // Column order matches the positional `Queryable` mapping of `UserAuth`.
    let query = users
        .filter(username.eq(uname))
        .select((id, username, full_name, password_hash, stellar_address, email, is_active));
    query.first::<UserAuth>(conn)
}

/// Fetch the password-less auth projection for a Stellar address;
/// yields `Err(NotFound)` when no user has that address.
pub(crate) fn get_user_for_auth_by_stellar(addr: &str) -> Result<UserForAuth, diesel::result::Error> {
    let conn = &mut get_conn()?;

    // Column order matches the positional `Queryable` mapping of `UserForAuth`.
    let query = users
        .filter(stellar_address.eq(addr))
        .select((id, full_name, username, stellar_address, email, is_active));
    query.first::<UserForAuth>(conn)
}

/// Fetch the password-less auth projection for an email address;
/// yields `Err(NotFound)` when no user has that email.
pub(crate) fn get_user_for_auth_by_email(mail: &str) -> Result<UserForAuth, diesel::result::Error> {
    let conn = &mut get_conn()?;

    // Column order matches the positional `Queryable` mapping of `UserForAuth`.
    let query = users
        .filter(email.eq(mail))
        .select((id, full_name, username, stellar_address, email, is_active));
    query.first::<UserForAuth>(conn)
}

+ 94
- 0
src/repositories/visit.rs View File

@@ -0,0 +1,94 @@
use diesel::sql_query;
use diesel::RunQueryDsl;
use diesel::sql_types::{BigInt, Int4, Nullable, Text, Timestamptz};
use crate::db::db::establish_connection;
use chrono::{DateTime, Duration, Utc};
use diesel::QueryableByName;

pub fn record_visit(
proposal_id_val: i32,
user_id_val: Option<i32>,
path_val: &str,
method_val: &str,
query_string_val: Option<&str>,
status_code_val: Option<i32>,
ip_address_val: Option<&str>,
user_agent_val: Option<&str>,
referrer_val: Option<&str>,
session_id_val: Option<&str>,
request_id_val: Option<&str>,
) -> Result<(), diesel::result::Error> {
let pool = establish_connection()
.map_err(|_| diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to establish database connection".to_string()),
))?;

let mut conn = pool.get().map_err(|_| {
diesel::result::Error::DatabaseError(
diesel::result::DatabaseErrorKind::Unknown,
Box::new("Failed to get database connection from pool".to_string()),
)
})?;

// Use raw SQL to avoid requiring schema.rs updates for the visits table
let query = sql_query(
"insert into visits (user_id, proposal_id, path, method, query_string, status_code, ip_address, user_agent, referrer, session_id, request_id) \
values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)"
)
.bind::<Nullable<Int4>, _>(user_id_val)
.bind::<Int4, _>(proposal_id_val)
.bind::<Text, _>(path_val)
.bind::<Text, _>(method_val)
.bind::<Nullable<Text>, _>(query_string_val)
.bind::<Nullable<Int4>, _>(status_code_val)
.bind::<Nullable<Text>, _>(ip_address_val)
.bind::<Nullable<Text>, _>(user_agent_val)
.bind::<Nullable<Text>, _>(referrer_val)
.bind::<Nullable<Text>, _>(session_id_val)
.bind::<Nullable<Text>, _>(request_id_val);

// Execute insert
query.execute(&mut conn).map(|_| ())
}

/// Row shape for `select count(*) as cnt ...` raw queries; the alias
/// `cnt` must match the field name for `QueryableByName` mapping.
#[derive(QueryableByName)]
struct CountResult {
    #[diesel(sql_type = BigInt)]
    cnt: i64,
}

/// Count visits for one proposal recorded strictly after `since`.
///
/// Returns the raw count; pool failures are wrapped as generic
/// `DatabaseError`s so callers only deal with `diesel::result::Error`.
pub fn recent_visit_count_since(
    proposal_id_val: i32,
    since: DateTime<Utc>,
) -> Result<i64, diesel::result::Error> {
    let pool = establish_connection()
        .map_err(|_| diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::Unknown,
            Box::new("Failed to establish database connection".to_string()),
        ))?;

    let mut conn = pool.get().map_err(|_| {
        diesel::result::Error::DatabaseError(
            diesel::result::DatabaseErrorKind::Unknown,
            Box::new("Failed to get database connection from pool".to_string()),
        )
    })?;

    // `count(*)` always produces exactly one row, so fetch it directly
    // with `get_result` instead of loading a Vec and peeking at index 0.
    let row: CountResult = sql_query(
        "select count(*) as cnt from visits where proposal_id = $1 and visited_at > $2"
    )
    .bind::<Int4, _>(proposal_id_val)
    .bind::<Timestamptz, _>(since)
    .get_result(&mut conn)?;

    Ok(row.cnt)
}

/// Convenience wrapper: count visits within the last `minutes` minutes.
pub fn recent_visit_count_in_minutes(
    proposal_id_val: i32,
    minutes: i64,
) -> Result<i64, diesel::result::Error> {
    recent_visit_count_since(proposal_id_val, Utc::now() - Duration::minutes(minutes))
}

+ 102
- 0
src/routes/amendment.rs View File

@@ -0,0 +1,102 @@
use actix_web::{get, post, web, HttpResponse};
use ipfs_api_backend_actix::IpfsClient;
use serde::{Deserialize, Serialize};
use serde_json::json;
use crate::ipfs::amendment::AmendmentService;
use crate::ipfs::ipfs::IpfsService;
use crate::repositories::amendment::{get_amendments_for_proposal, get_cid_for_amendment};
use crate::types::amendment::{AmendmentError, AmendmentFile};

/// Query parameters for `GET /list`; `proposalId` in camelCase on the wire.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ListAmendmentsRequest {
    pub proposal_id: i32,
}
#[get("list")]
async fn list_amendments(params: web::Query<ListAmendmentsRequest>) -> Result<HttpResponse, actix_web::Error> {
match get_amendments_for_proposal(params.proposal_id) {
Ok(amendments) => Ok(HttpResponse::Ok().json(json!({"amendments": amendments}))),
Err(err) => Ok(HttpResponse::InternalServerError().json(
json!({"error": err.to_string()})
))
}
}

/// Body for `POST /add` (camelCase keys on the wire).
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AddAmendmentRequest {
    // Parent proposal this amendment applies to.
    pub proposal_id: i32,
    pub name: String,
    pub content: String,
    pub creator: String,
}
#[post("add")]
async fn add_amendment(params: web::Json<AddAmendmentRequest>) -> Result<HttpResponse, actix_web::Error> {
let req = params.into_inner();

// Basic input validation
if req.creator.trim().is_empty() {
return Ok(HttpResponse::BadRequest().json(json!({"error": "Creator is required"})));
}
if req.name.trim().is_empty() {
return Ok(HttpResponse::BadRequest().json(json!({"error": "Name is required"})));
}
if req.content.trim().is_empty() {
return Ok(HttpResponse::BadRequest().json(json!({"error": "Content is required"})));
}
if req.proposal_id <= 0 {
return Ok(HttpResponse::BadRequest().json(json!({"error": "proposalId must be a positive integer"})));
}

match create_and_store_amendment(req).await {
Ok(cid) => Ok(HttpResponse::Ok().json(json!({"cid": cid}))),
Err(err) => {
match err {
AmendmentError::SerializationError(e) => Ok(HttpResponse::BadRequest().json(
json!({"error": format!("Invalid amendment payload: {}", e)})
)),
_ => Ok(HttpResponse::InternalServerError().json(
json!({"error": format!("Failed to add amendment: {}", err)})
)),
}
}
}
}

/// Query parameters for `GET /get`; `amendmentId` in camelCase on the wire.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetAmendmentRequest {
    pub amendment_id: i32,
}
#[get("get")]
async fn get_amendment(request: web::Query<GetAmendmentRequest>) -> Result<HttpResponse, actix_web::Error> {
let mut amendment_client = AmendmentService::new(IpfsClient::default());
let cid = match get_cid_for_amendment(request.amendment_id) {
Ok(cid) => cid,
Err(err) => return Ok(HttpResponse::InternalServerError().json(
json!({"error": format!("Failed to get amendment: {}", err)})
))
};
let item = amendment_client.read(cid).await;
match item {
Ok(item) => Ok(HttpResponse::Ok().json(json!({"amendment": item}))),
Err(e) => Ok(HttpResponse::InternalServerError().json(json!({"error": format!("Failed to read amendment: {}", e)})))
}
}

async fn create_and_store_amendment(request: AddAmendmentRequest) -> Result<String, AmendmentError> {
let amendment_data = AmendmentFile {
name: request.name.to_string(),
content: request.content.to_string(),
creator: request.creator.to_string(),
created_at: Default::default(),
updated_at: Default::default(),
proposal_id: request.proposal_id,
};

let client = IpfsClient::default();
let mut amendment_service = AmendmentService::new(client);

let cid = amendment_service.save(amendment_data).await?;
Ok(cid)
}

+ 200
- 0
src/routes/auth.rs View File

@@ -0,0 +1,200 @@
use actix_web::{get, post, web, HttpResponse};
use serde::Deserialize;
use serde_json::json;

use crate::repositories::user::{create_user, NewUserInsert, RegisteredUser, find_user_by_username, get_user_for_auth_by_stellar};

// Token lifetime passed to `generate_jwt`; the unit is defined by that
// helper (presumably minutes — TODO confirm against utils::auth).
const JWT_EXPIRATION_TIME: i64 = 240;

use argon2::{
password_hash::{rand_core::OsRng, PasswordHasher, SaltString, PasswordHash, PasswordVerifier},
Argon2,
};

use crate::utils::auth::{verify_freighter_signature, generate_jwt, generate_freighter_nonce};

/// Body for `POST /register` (camelCase keys on the wire).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisterRequest {
    pub username: String,
    // Plaintext; hashed with Argon2 before it is stored.
    pub password: String,
    pub stellar_address: String,
    pub email: Option<String>,
}

/// Body for `POST /login`.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LoginRequest {
    pub username: String,
    pub password: String,
}

/// POST /register — create a user with an Argon2id-hashed password.
///
/// Responds 400 on missing fields, 409 on unique-constraint collisions
/// (duplicate username/address/email), 500 on other failures.
#[post("register")]
pub async fn register(req: web::Json<RegisterRequest>) -> Result<HttpResponse, actix_web::Error> {
    // Basic validation
    let missing = req.username.trim().is_empty()
        || req.password.is_empty()
        || req.stellar_address.trim().is_empty();
    if missing {
        return Ok(HttpResponse::BadRequest().json(json!({
            "error": "username, password and stellarAddress are required"
        })));
    }

    // Argon2id with a fresh random salt per user.
    let salt = SaltString::generate(&mut OsRng);
    let password_hash = match Argon2::default().hash_password(req.password.as_bytes(), &salt) {
        Ok(ph) => ph.to_string(),
        Err(e) => {
            return Ok(HttpResponse::InternalServerError().json(json!({
                "error": format!("Failed to hash password: {}", e)
            })))
        }
    };

    let new_user = NewUserInsert {
        username: req.username.trim().to_string(),
        password_hash,
        stellar_address: req.stellar_address.trim().to_string(),
        email: req.email.clone(),
    };

    match create_user(new_user) {
        Ok(user) => Ok(HttpResponse::Created().json(json!({"user": user}))),
        Err(diesel::result::Error::DatabaseError(diesel::result::DatabaseErrorKind::UniqueViolation, info)) => {
            Ok(HttpResponse::Conflict().json(json!({
                "error": format!("Unique constraint violation: {}", info.message())
            })))
        }
        Err(e) => Ok(HttpResponse::InternalServerError().json(json!({
            "error": format!("Failed to create user: {}", e)
        }))),
    }
}

/// Body for `POST /login/freighter`; only the Stellar address is sent.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FreighterLoginRequest {
    pub stellar_address: String,
}


/// POST /login — verify username/password and mint a JWT.
///
/// Responds 400 on missing fields, 401 on unknown user / bad password /
/// inactive account, 500 on infrastructure failures.
#[post("login")]
pub async fn login(req: web::Json<LoginRequest>) -> Result<HttpResponse, actix_web::Error> {
    if req.username.trim().is_empty() || req.password.is_empty() {
        return Ok(HttpResponse::BadRequest().json(json!({
            "error": "Username and password are required"
        })));
    }

    // Unknown users get the same "Invalid credentials" reply as bad
    // passwords, so the endpoint does not leak which usernames exist.
    let user_auth = match find_user_by_username(req.username.trim()) {
        Ok(u) => u,
        Err(diesel::result::Error::NotFound) => {
            return Ok(HttpResponse::Unauthorized().json(json!({
                "error": "Invalid credentials"
            })))
        }
        Err(e) => {
            return Ok(HttpResponse::InternalServerError().json(json!({
                "error": format!("Failed to query user: {}", e)
            })))
        }
    };

    if !user_auth.is_active {
        return Ok(HttpResponse::Unauthorized().json(json!({
            "error": "Account is inactive"
        })));
    }

    // Re-parse the stored PHC hash string; a parse failure means bad data.
    let parsed_hash = match PasswordHash::new(&user_auth.password_hash) {
        Ok(ph) => ph,
        Err(_) => {
            return Ok(HttpResponse::InternalServerError().json(json!({
                "error": "Stored password hash is invalid"
            })))
        }
    };

    if Argon2::default()
        .verify_password(req.password.as_bytes(), &parsed_hash)
        .is_err()
    {
        return Ok(HttpResponse::Unauthorized().json(json!({
            "error": "Invalid credentials"
        })));
    }

    // Password checked out — issue a JWT for the session.
    let token = match generate_jwt(
        user_auth.id,
        user_auth.full_name.as_deref().unwrap_or(""),
        &user_auth.username,
        &user_auth.stellar_address,
        Some(JWT_EXPIRATION_TIME),
    ) {
        Ok(t) => t,
        Err(e) => {
            return Ok(HttpResponse::InternalServerError().json(json!({
                "error": e
            })))
        }
    };

    Ok(HttpResponse::Ok().json(json!({
        "token": token
    })))
}

/// GET /nonce — issue a fresh nonce for the Freighter login flow.
#[get("nonce")]
pub async fn get_nonce() -> HttpResponse {
    HttpResponse::Ok().json(json!({"nonce": generate_freighter_nonce()}))
}

/// POST /login/freighter — mint a JWT for the user owning a Stellar address.
///
/// NOTE(review): no signature is verified in this handler —
/// `verify_freighter_signature` is imported at the top of the file but
/// never called, so possession of a public address alone yields a token.
/// TODO: require a nonce signed by the wallet before issuing the JWT.
#[post("login/freighter")]
pub async fn login_freighter(req: web::Json<FreighterLoginRequest>) -> Result<HttpResponse, actix_web::Error> {
    let address = req.stellar_address.trim();
    if address.is_empty() {
        return Ok(HttpResponse::BadRequest().json(json!({
            "error": "stellarAddress is required"
        })));
    }

    // Resolve the account bound to this address.
    let user = match get_user_for_auth_by_stellar(address) {
        Ok(u) => u,
        Err(diesel::result::Error::NotFound) => {
            return Ok(HttpResponse::Unauthorized().json(json!({
                "error": "User not found"
            })))
        }
        Err(e) => {
            return Ok(HttpResponse::InternalServerError().json(json!({
                "error": format!("Failed to query user: {}", e)
            })))
        }
    };

    if !user.is_active {
        return Ok(HttpResponse::Unauthorized().json(json!({
            "error": "Account is inactive"
        })));
    }

    // Build JWT
    let jwt = generate_jwt(
        user.id,
        user.full_name.as_deref().unwrap_or(""),
        &user.username,
        &user.stellar_address,
        Some(JWT_EXPIRATION_TIME),
    );
    match jwt {
        Ok(token) => Ok(HttpResponse::Ok().json(json!({
            "token": token
        }))),
        Err(e) => Ok(HttpResponse::InternalServerError().json(json!({
            "error": e
        }))),
    }
}



+ 18
- 0
src/routes/comment.rs View File

@@ -0,0 +1,18 @@
use actix_web::{post, web, HttpResponse};
use chrono::Utc;
use ipfs_api_backend_actix::IpfsClient;
use serde::Deserialize;
use serde_json::json;

use crate::types::comment::{CommentError, CommentMetadata};

/// Body for appending a comment to a local thread (camelCase wire keys).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AppendLocalCommentRequest {
    pub thread_id: String,
    pub content: String,
    pub creator: String,
    // Optional links to the entity being commented on (see types::comment::CommentFile).
    pub proposal_cid: Option<String>,
    pub amendment_cid: Option<String>,
    // Optional parent comment for threaded replies.
    pub parent_comment_cid: Option<String>,
}

+ 5
- 0
src/routes/mod.rs View File

@@ -0,0 +1,5 @@
// HTTP route handler modules, one per resource area.
pub(crate) mod proposal;
pub(crate) mod amendment;
pub(crate) mod auth;
pub(crate) mod comment;
pub(crate) mod openapi;

+ 251
- 0
src/routes/openapi.rs View File

@@ -0,0 +1,251 @@
use actix_web::{get, HttpResponse};
use serde_json::{json, Value};

// Minimal OpenAPI 3.0 spec to enable frontend code generation.
// NOTE: This is intentionally lightweight. We can later replace the manual spec
// with an automatic generator (e.g., Apistos annotations) without changing the endpoint path.

/// Build the OpenAPI 3.0 document served at `/openapi.json`.
///
/// The schemas are hand-written; keep them in sync with the request
/// structs in `src/routes/*` (serde renames everything to camelCase).
fn openapi_spec() -> Value {
    // Basic schemas for known request bodies
    let add_proposal_request = json!({
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "description": {"type": "string"},
            "creator": {"type": "string"}
        },
        "required": ["name", "description", "creator"],
        "additionalProperties": false
    });

    let visit_proposal_request = json!({
        "type": "object",
        "properties": {
            "proposalId": {"type": "integer", "format": "int32"},
            "path": {"type": "string"},
            "method": {"type": "string"},
            "queryString": {"type": "string"},
            "statusCode": {"type": "integer", "format": "int32"},
            "referrer": {"type": "string"}
        },
        "required": ["proposalId"],
        "additionalProperties": false
    });

    let register_request = json!({
        "type": "object",
        "properties": {
            "username": {"type": "string"},
            "password": {"type": "string"},
            "stellarAddress": {"type": "string"},
            "email": {"type": ["string", "null"]}
        },
        "required": ["username", "password", "stellarAddress"],
        "additionalProperties": false
    });

    let login_request = json!({
        "type": "object",
        "properties": {
            "username": {"type": "string"},
            "password": {"type": "string"}
        },
        "required": ["username", "password"],
        "additionalProperties": false
    });

    let freighter_login_request = json!({
        "type": "object",
        "properties": {
            "stellarAddress": {"type": "string"}
        },
        "required": ["stellarAddress"],
        "additionalProperties": false
    });

    // Mirrors repositories::proposal::SelectableProposal as serialized.
    let proposal_item = json!({
        "type": "object",
        "properties": {
            "id": {"type": "integer", "format": "int32"},
            "name": {"type": "string"},
            "cid": {"type": "string"},
            "summary": {"type": ["string", "null"]},
            "creator": {"type": "string"},
            "isCurrent": {"type": "boolean"},
            "previousCid": {"type": ["string", "null"]},
            "createdAt": {"type": ["string", "null"]},
            "updatedAt": {"type": ["string", "null"]}
        },
        "required": ["id", "name", "cid", "creator", "isCurrent"],
        "additionalProperties": false
    });

    let proposal_list = json!({
        "type": "array",
        "items": {"$ref": "#/components/schemas/ProposalItem"}
    });

    let list_proposals_response = json!({
        "type": "object",
        "properties": {
            "proposals": {"$ref": "#/components/schemas/ProposalList"}
        }
    });

    // Generic JSON response schema when we don't have strict typing
    let generic_object = json!({
        "type": "object",
        "additionalProperties": true
    });

    json!({
        "openapi": "3.0.3",
        "info": {
            "title": "PuffPastry API",
            "version": "1.0.0"
        },
        "servers": [
            {"url": "http://localhost:7300"}
        ],
        "paths": {
            "/api/v1/proposals/list": {
                "get": {
                    "summary": "List proposals",
                    "operationId": "listProposals",
                    "tags": ["Proposal"],
                    "parameters": [
                        {"name": "offset", "in": "query", "required": true, "schema": {"type": "integer", "format": "int64"}},
                        {"name": "limit", "in": "query", "required": true, "schema": {"type": "integer", "format": "int64"}}
                    ],
                    "responses": {
                        "200": {"description": "OK", "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListProposalsResponse"}}}}
                    }
                }
            },
            "/api/v1/proposal/add": {
                "post": {
                    "summary": "Create a proposal",
                    "operationId": "createProposal",
                    "tags": ["Proposal"],
                    "requestBody": {"required": true, "content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddProposalRequest"}}}},
                    "responses": {
                        "200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}},
                        "400": {"description": "Bad Request"}
                    }
                }
            },
            "/api/v1/proposal/get": {
                "get": {
                    "summary": "Get a proposal by id",
                    "operationId": "getProposal",
                    "tags": ["Proposal"],
                    "parameters": [
                        {"name": "id", "in": "query", "required": true, "schema": {"type": "integer", "format": "int32"}}
                    ],
                    "responses": {
                        "200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}}
                    }
                }
            },
            "/api/v1/proposal/visit": {
                "post": {
                    "summary": "Record a proposal visit",
                    "operationId": "recordProposalVisit",
                    "tags": ["Proposal"],
                    "requestBody": {"required": true, "content": {"application/json": {"schema": {"$ref": "#/components/schemas/VisitProposalRequest"}}}},
                    "responses": {
                        "201": {"description": "Created", "content": {"application/json": {"schema": generic_object}}}
                    }
                }
            },
            "/api/v1/amendment/add": {
                "post": {
                    "summary": "Create an amendment",
                    "operationId": "createAmendment",
                    "tags": ["Amendment"],
                    "requestBody": {"required": true, "content": {"application/json": {"schema": generic_object}}},
                    "responses": {"200": {"description": "OK"}}
                }
            },
            "/api/v1/amendment/get": {
                "get": {
                    "summary": "Get an amendment",
                    "operationId": "getAmendment",
                    "tags": ["Amendment"],
                    "parameters": [
                        {"name": "id", "in": "query", "required": true, "schema": {"type": "integer", "format": "int32"}}
                    ],
                    "responses": {"200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}}}
                }
            },
            "/api/v1/amendments/list": {
                "get": {
                    "summary": "List amendments",
                    "operationId": "listAmendments",
                    "tags": ["Amendment"],
                    "parameters": [
                        {"name": "proposalId", "in": "query", "required": true, "schema": {"type": "integer", "format": "int32"}}
                    ],
                    "responses": {"200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}}}
                }
            },
            "/api/v1/auth/register": {
                "post": {
                    "summary": "Register a user",
                    "operationId": "registerUser",
                    "tags": ["Auth"],
                    "requestBody": {"required": true, "content": {"application/json": {"schema": {"$ref": "#/components/schemas/RegisterRequest"}}}},
                    "responses": {"201": {"description": "Created", "content": {"application/json": {"schema": generic_object}}},
                    "400": {"description": "Bad Request"},
                    "409": {"description": "Conflict"}}
                }
            },
            "/api/v1/auth/login": {
                "post": {
                    "summary": "Login with username/password",
                    "operationId": "loginUser",
                    "tags": ["Auth"],
                    "requestBody": {"required": true, "content": {"application/json": {"schema": {"$ref": "#/components/schemas/LoginRequest"}}}},
                    "responses": {"200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}},
                    "401": {"description": "Unauthorized"}}
                }
            },
            "/api/v1/auth/nonce": {
                "get": {
                    "summary": "Get freighter login nonce",
                    "operationId": "getFreighterNonce",
                    "tags": ["Auth"],
                    "responses": {"200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}}}
                }
            },
            "/api/v1/auth/login/freighter": {
                "post": {
                    "summary": "Login with Stellar (Freighter)",
                    "operationId": "loginFreighter",
                    "tags": ["Auth"],
                    "requestBody": {"required": true, "content": {"application/json": {"schema": {"$ref": "#/components/schemas/FreighterLoginRequest"}}}},
                    "responses": {"200": {"description": "OK", "content": {"application/json": {"schema": generic_object}}},
                    "401": {"description": "Unauthorized"}}
                }
            }
        },
        "components": {
            "schemas": {
                "AddProposalRequest": add_proposal_request,
                "VisitProposalRequest": visit_proposal_request,
                "RegisterRequest": register_request,
                "LoginRequest": login_request,
                "FreighterLoginRequest": freighter_login_request,
                "ListProposalsResponse": list_proposals_response,
                "ProposalItem": proposal_item,
                "ProposalList": proposal_list
            }
        }
    })
}

/// GET /openapi.json — serve the generated OpenAPI document.
#[get("/openapi.json")]
pub async fn get_openapi() -> HttpResponse {
    HttpResponse::Ok().json(openapi_spec())
}

+ 157
- 0
src/routes/proposal.rs View File

@@ -0,0 +1,157 @@
use crate::ipfs::ipfs::IpfsService;
use crate::ipfs::proposal::ProposalService;
use crate::repositories::proposal::{get_cid_for_proposal, paginate_proposals};
use crate::repositories::visit::record_visit;
use crate::types::proposal::ProposalError;
use crate::types::proposal::ProposalFile;
use actix_web::{get, post, web, HttpResponse, HttpRequest};
use chrono::Utc;
use ipfs_api_backend_actix::IpfsClient;
use serde::{Deserialize, Serialize};
use serde_json::json;

/// Query parameters for the paginated proposal listing.
#[derive(Deserialize)]
struct PaginationParams {
    offset: i64,
    limit: i64,
}
/// GET /list — paginated listing of current proposals.
#[get("list")]
async fn list_proposals(params: web::Query<PaginationParams>) -> Result<HttpResponse, actix_web::Error> {
    let response = match paginate_proposals(params.offset, params.limit) {
        Ok(proposals) => HttpResponse::Ok().json(json!({"proposals": proposals})),
        Err(err) => HttpResponse::InternalServerError().json(json!({
            "error": format!("Failed to fetch proposals: {}", err)
        })),
    };
    Ok(response)
}

/// Body for `POST /add` (camelCase keys on the wire).
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AddProposalRequest {
    pub name: String,
    // Stored as the proposal's `content` field on IPFS.
    pub description: String,
    pub creator: String,
}
#[post("/add")]
async fn add_proposal(
request: web::Json<AddProposalRequest>,
) -> Result<HttpResponse, actix_web::Error> {
let req = request.into_inner();

// Basic input validation
if req.creator.trim().is_empty() {
return Ok(HttpResponse::BadRequest().json(json!({"error": "Creator is required"})));
}
if req.name.trim().is_empty() {
return Ok(HttpResponse::BadRequest().json(json!({"error": "Name is required"})));
}
if req.description.trim().is_empty() {
return Ok(HttpResponse::BadRequest().json(json!({"error": "Description is required"})));
}

match create_and_store_proposal(&req).await {
Ok(cid) => Ok(HttpResponse::Ok().json(json!({ "cid": cid }))),
Err(e) => {
match e {
ProposalError::SerializationError(err) => Ok(HttpResponse::BadRequest().json(json!({
"error": format!("Invalid proposal payload: {}", err)
}))),
_ => Ok(HttpResponse::InternalServerError().json(json!({
"error": format!("Failed to create proposal: {}", e)
}))),
}
}
}
}

/// Body for `POST /visit` — analytics metadata for one proposal view.
/// Everything except `proposalId` is optional; missing path/method fall
/// back to the actual HTTP request's values in the handler.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct VisitProposalRequest {
    pub proposal_id: i32,
    pub path: Option<String>,
    pub method: Option<String>,
    pub query_string: Option<String>,
    pub status_code: Option<i32>,
    pub referrer: Option<String>,
}

/// POST /visit — record an analytics row for a proposal view.
#[post("visit")]
async fn visit_proposal(req: HttpRequest, payload: web::Json<VisitProposalRequest>) -> Result<HttpResponse, actix_web::Error> {
    let body = payload.into_inner();
    if body.proposal_id <= 0 {
        return Ok(HttpResponse::BadRequest().json(json!({"error": "proposalId must be a positive integer"})));
    }

    // Path/method fall back to the actual request so the NOT NULL
    // columns in `visits` are always populated.
    let path_owned = body.path.unwrap_or_else(|| req.path().to_string());
    let method_owned = body.method.unwrap_or_else(|| req.method().to_string());

    // Small helper: read an HTTP header as an owned UTF-8 string.
    let header_str = |name: &str| -> Option<String> {
        req.headers().get(name).and_then(|v| v.to_str().ok()).map(|s| s.to_string())
    };
    let user_agent_val = header_str("User-Agent");
    let session_id_val = header_str("X-Session-Id");
    let request_id_val = header_str("X-Request-Id");

    let connection_info = req.connection_info();
    let ip_address_val = connection_info.realip_remote_addr().map(|s| s.to_string());

    // No authenticated user context available here; record as anonymous.
    let result = record_visit(
        body.proposal_id,
        None,
        &path_owned,
        &method_owned,
        body.query_string.as_deref(),
        body.status_code,
        ip_address_val.as_deref(),
        user_agent_val.as_deref(),
        body.referrer.as_deref(),
        session_id_val.as_deref(),
        request_id_val.as_deref(),
    );

    match result {
        Ok(_) => Ok(HttpResponse::Created().json(json!({"ok": true}))),
        Err(err) => Ok(HttpResponse::InternalServerError().json(json!({"error": format!("Failed to record visit: {}", err)}))),
    }
}

/// Query parameters for `GET /get` — the proposal's database id.
#[derive(Deserialize)]
struct GetProposalParams {
    pub id: i32,
}
/// GET /get — resolve a proposal id to its CID and read it from IPFS.
#[get("get")]
async fn get_proposal(params: web::Query<GetProposalParams>) -> Result<HttpResponse, actix_web::Error> {
    // First hop: database lookup of the content address.
    let cid = match get_cid_for_proposal(params.id) {
        Ok(cid) => cid,
        Err(err) => {
            return Ok(HttpResponse::InternalServerError().json(json!({
                "error": format!("Failed to fetch proposal CID: {}", err)
            })))
        }
    };

    // Second hop: fetch the document itself from IPFS.
    let mut proposal_client = ProposalService::new(IpfsClient::default());
    match proposal_client.read(cid).await {
        Ok(proposal) => Ok(HttpResponse::Ok().json(json!({"proposal": proposal}))),
        Err(e) => Ok(HttpResponse::InternalServerError().json(json!({
            "error": format!("Failed to read proposal: {}", e)
        }))),
    }
}

/// Serialize the proposal payload and persist it to IPFS, returning the CID.
async fn create_and_store_proposal(
    request: &AddProposalRequest,
) -> Result<String, ProposalError> {
    // Single clock read so created_at and updated_at are exactly equal
    // on creation (two separate Utc::now() calls could differ).
    let now = Utc::now().naive_utc();
    let proposal_data = ProposalFile {
        // The request is borrowed, so clone the owned strings.
        name: request.name.clone(),
        content: request.description.clone(),
        creator: request.creator.clone(),
        created_at: now,
        updated_at: now,
    };

    let client = IpfsClient::default();
    let mut proposal_service = ProposalService::new(client);

    proposal_service.save(proposal_data).await
}

+ 100
- 0
src/schema.rs View File

@@ -0,0 +1,100 @@
// @generated automatically by Diesel CLI.
//
// NOTE(review): hand edits here are overwritten when the schema is
// regenerated (`diesel print-schema` / migration runs).

pub mod sql_types {
    // Maps the Postgres enum type `amendment_status` for diesel.
    #[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
    #[diesel(postgres_type(name = "amendment_status"))]
    pub struct AmendmentStatus;
}

diesel::table! {
    use diesel::sql_types::*;
    use super::sql_types::AmendmentStatus;

    amendments (id) {
        id -> Int4,
        name -> Text,
        cid -> Text,
        summary -> Nullable<Text>,
        status -> AmendmentStatus,
        creator -> Text,
        is_current -> Bool,
        previous_cid -> Nullable<Text>,
        created_at -> Timestamptz,
        updated_at -> Timestamptz,
        proposal_id -> Int4,
    }
}

diesel::table! {
    comments (id) {
        id -> Int4,
        cid -> Text,
        is_current -> Bool,
        previous_cid -> Nullable<Text>,
        proposal_id -> Int4,
        created_at -> Timestamptz,
        updated_at -> Timestamptz,
    }
}

diesel::table! {
    proposals (id) {
        id -> Int4,
        name -> Text,
        cid -> Text,
        summary -> Nullable<Text>,
        creator -> Text,
        is_current -> Bool,
        previous_cid -> Nullable<Text>,
        created_at -> Nullable<Timestamp>,
        updated_at -> Nullable<Timestamp>,
    }
}

diesel::table! {
    users (id) {
        id -> Int4,
        username -> Text,
        password_hash -> Text,
        stellar_address -> Text,
        email -> Nullable<Text>,
        is_active -> Bool,
        roles -> Array<Nullable<Text>>,
        last_login_at -> Nullable<Timestamptz>,
        created_at -> Timestamptz,
        updated_at -> Timestamptz,
        full_name -> Nullable<Text>,
    }
}

diesel::table! {
    visits (id) {
        id -> Int4,
        user_id -> Nullable<Int4>,
        proposal_id -> Int4,
        path -> Text,
        method -> Text,
        query_string -> Nullable<Text>,
        status_code -> Nullable<Int4>,
        ip_address -> Nullable<Text>,
        user_agent -> Nullable<Text>,
        referrer -> Nullable<Text>,
        session_id -> Nullable<Text>,
        request_id -> Nullable<Text>,
        visited_at -> Timestamptz,
        created_at -> Timestamptz,
        updated_at -> Timestamptz,
    }
}

// Foreign-key joins declared by the migrations.
diesel::joinable!(amendments -> proposals (proposal_id));
diesel::joinable!(comments -> proposals (proposal_id));
diesel::joinable!(visits -> users (user_id));

diesel::allow_tables_to_appear_in_same_query!(
    amendments,
    comments,
    proposals,
    users,
    visits,
);

+ 117
- 0
src/types/amendment.rs View File

@@ -0,0 +1,117 @@
use core::fmt;
use crate::schema::amendments;
use chrono::NaiveDateTime;
use diesel::{Insertable, Queryable, Selectable};
use serde::{Deserialize, Serialize};

/// Failure modes of amendment persistence: database, IPFS transport,
/// and JSON (de)serialization.
#[derive(Debug)]
pub enum AmendmentError {
    DatabaseError(Box<dyn std::error::Error>),
    IpfsError(Box<dyn std::error::Error>),
    SerializationError(serde_json::Error),
}

impl fmt::Display for AmendmentError {
    /// Render as "<category> error: <source>", the text `to_string()`
    /// callers and the route handlers put into JSON error bodies.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (category, source): (&str, &dyn fmt::Display) = match self {
            AmendmentError::DatabaseError(e) => ("Database", e),
            AmendmentError::IpfsError(e) => ("Ipfs", e),
            AmendmentError::SerializationError(e) => ("Serialization", e),
        };
        write!(f, "{} error: {}", category, source)
    }
}

/// JSON document stored on IPFS for one amendment version.
#[derive(Queryable, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AmendmentFile {
    pub name: String,
    pub content: String,
    pub creator: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    pub proposal_id: i32,
}

/// Lifecycle of an amendment; maps onto the Postgres enum
/// `amendment_status` via diesel_derive_enum.
#[derive(Debug, Clone, Serialize, Deserialize, diesel_derive_enum::DbEnum)]
#[db_enum(existing_type_path = "crate::schema::sql_types::AmendmentStatus")]
pub enum AmendmentStatus {
    Proposed,
    Approved,
    Withdrawn,
    Rejected,
}

/// Full `amendments` row as read from the database.
#[derive(Queryable, Selectable, Clone, Serialize, Deserialize)]
#[diesel(table_name = amendments)]
#[diesel(check_for_backend(diesel::pg::Pg))]
#[serde(rename_all = "camelCase")]
pub struct SelectableAmendment {
    pub id: i32,
    pub name: String,
    pub cid: String,
    pub summary: Option<String>,
    pub status: AmendmentStatus,
    pub creator: String,
    pub is_current: bool,
    pub previous_cid: Option<String>,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    pub proposal_id: i32,
}

/// Insertable `amendments` row (no `id`; `cid` optional until the
/// content has been stored on IPFS — see `with_cid`).
#[derive(Insertable, Clone, Serialize, Deserialize)]
#[diesel(table_name = amendments)]
#[diesel(check_for_backend(diesel::pg::Pg))]
#[serde(rename_all = "camelCase")]
pub struct Amendment {
    pub name: String,
    pub cid: Option<String>,
    pub summary: Option<String>,
    pub status: AmendmentStatus,
    pub creator: String,
    pub is_current: bool,
    pub previous_cid: Option<String>,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    pub proposal_id: i32,
}

impl Amendment {
    /// Build a new, current amendment in the `Proposed` state with no
    /// CID yet (attach one later via [`with_cid`](Self::with_cid)).
    pub fn new(name: String, summary: Option<String>, creator: String, proposal_id: i32) -> Self {
        // Single clock read so created_at and updated_at start exactly
        // equal (two Utc::now() calls could straddle a tick).
        let now = chrono::Utc::now().naive_utc();
        Amendment {
            name,
            summary,
            status: AmendmentStatus::Proposed,
            creator,
            is_current: true,
            previous_cid: None,
            created_at: now,
            updated_at: now,
            proposal_id,
            cid: None,
        }
    }

    /// Attach the IPFS CID once the content has been stored.
    pub fn with_cid(mut self, cid: String) -> Self {
        self.cid = Some(cid);
        self
    }

    /// Flag this row as the current version and bump `updated_at`.
    pub fn mark_as_current(mut self) -> Self {
        self.is_current = true;
        self.updated_at = chrono::Utc::now().naive_utc();
        self
    }
}

// Let `?` convert serde failures into the serialization variant.
impl From<serde_json::Error> for AmendmentError {
    fn from(e: serde_json::Error) -> Self {
        AmendmentError::SerializationError(e)
    }
}

// I/O failures surface as IPFS errors (transport layer uses std::io).
impl From<std::io::Error> for AmendmentError {
    fn from(e: std::io::Error) -> Self {
        AmendmentError::IpfsError(Box::new(e))
    }
}

+ 50
- 0
src/types/comment.rs View File

@@ -0,0 +1,50 @@
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use std::fmt;

/// Errors produced by comment storage/retrieval operations.
#[derive(Debug)]
pub enum CommentError {
    /// Failure talking to IPFS (boxed to erase the client's error type).
    IpfsError(Box<dyn std::error::Error>),
    /// JSON (de)serialization failure.
    SerializationError(serde_json::Error),
}

impl fmt::Display for CommentError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CommentError::IpfsError(e) => write!(f, "IPFS error: {}", e),
CommentError::SerializationError(e) => write!(f, "Serialization error: {}", e),
}
}
}

/// A single comment as serialized to/from IPFS JSON.
#[derive(Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CommentMetadata {
    /// Comment body text.
    pub content: String,
    /// Identifier of the comment author — presumably a Stellar address;
    /// TODO confirm against callers.
    pub creator: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}

/// The JSON document stored in IPFS for a set of comments, plus optional
/// links tying it to the entity being commented on.
#[derive(Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CommentFile {
    /// All comments contained in this file.
    pub comments: Vec<CommentMetadata>,
    // Optional association fields to link this comment to a target entity.
    /// CID of the proposal these comments refer to, if any.
    pub proposal_cid: Option<String>,
    /// CID of the amendment these comments refer to, if any.
    pub amendment_cid: Option<String>,
    // Optional parent comment for threads.
    /// CID of the parent comment when this file is a threaded reply.
    pub parent_comment_cid: Option<String>,
}

impl From<serde_json::Error> for CommentError {
fn from(e: serde_json::Error) -> Self {
CommentError::SerializationError(e)
}
}

impl From<std::io::Error> for CommentError {
fn from(e: std::io::Error) -> Self {
CommentError::IpfsError(Box::new(e))
}
}

+ 3
- 0
src/types/ipfs.rs View File

@@ -0,0 +1,3 @@
// Generic result alias shared by the IPFS helper functions; each call site
// supplies its own error type (e.g. ProposalError, AmendmentError, CommentError).
// NOTE(review): the original comment claimed a default of ProposalError, but no
// default type parameter is declared — add `E = ProposalError` if that was intended.
pub type IpfsResult<T, E> = Result<T, E>;


+ 4
- 0
src/types/mod.rs View File

@@ -0,0 +1,4 @@
pub(crate) mod proposal;
pub(crate) mod amendment;
pub(crate) mod ipfs;
pub(crate) mod comment;

+ 127
- 0
src/types/proposal.rs View File

@@ -0,0 +1,127 @@
use crate::schema::proposals;
use chrono::NaiveDateTime;
use diesel::Insertable;
use diesel::Queryable;
use diesel::Selectable;
use serde::de::StdError;
use serde::{Deserialize, Serialize};
use std::fmt;

/// Errors produced by proposal storage/retrieval operations.
#[derive(Debug)]
pub enum ProposalError {
    /// No proposal matched the requested identifier.
    ProposalNotFound,
    /// Failure at the database layer (boxed to erase the concrete error type).
    DatabaseError(Box<dyn std::error::Error>),
    /// Failure talking to IPFS (boxed to erase the client's error type).
    IpfsError(Box<dyn std::error::Error>),
    /// JSON (de)serialization failure.
    SerializationError(serde_json::Error),
}

impl fmt::Display for ProposalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ProposalError::ProposalNotFound => write!(f, "Proposal not found"),
ProposalError::DatabaseError(e) => write!(f, "Database error: {}", e),
ProposalError::IpfsError(e) => write!(f, "IPFS error: {}", e),
ProposalError::SerializationError(e) => write!(f, "Serialization error: {}", e),
}
}
}

impl StdError for ProposalError {
    /// Expose the wrapped cause (if any) for error-chain inspection.
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        match self {
            Self::ProposalNotFound => None,
            // Both variants wrap a boxed error, so one arm covers them.
            Self::DatabaseError(e) | Self::IpfsError(e) => Some(e.as_ref()),
            Self::SerializationError(e) => Some(e),
        }
    }
}

/// The JSON document stored in IPFS for a proposal's full content.
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ProposalFile {
    /// Proposal title.
    pub name: String,
    /// Full proposal body text.
    pub content: String,
    /// Identifier of the creating user — presumably a Stellar address;
    /// TODO confirm against callers.
    pub creator: String,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}

/// A fully-populated proposal row as read back from the `proposals` table.
///
/// NOTE: Diesel's `Queryable` maps columns to fields by position, so the
/// field order below must stay in sync with the table's column order.
#[derive(Queryable, Selectable, Clone, Serialize, Deserialize)]
#[diesel(table_name = proposals)]
#[diesel(check_for_backend(diesel::pg::Pg))]
#[serde(rename_all = "camelCase")]
pub struct SelectableProposal {
    /// Primary key assigned by the database.
    pub id: i32,
    /// Proposal title.
    pub name: String,
    /// IPFS content identifier of the stored proposal content.
    pub cid: String,
    /// Optional short summary of the proposal.
    pub summary: Option<String>,
    /// Identifier of the creating user — presumably a Stellar address;
    /// TODO confirm against the users table.
    pub creator: String,
    /// Whether this row is the current revision of the proposal.
    pub is_current: bool,
    /// CID of the revision this proposal supersedes, if any.
    pub previous_cid: Option<String>,
    // NOTE(review): nullable here, unlike SelectableAmendment's non-Option
    // timestamps — presumably the proposals columns lack NOT NULL; confirm
    // against the migration.
    pub created_at: Option<NaiveDateTime>,
    pub updated_at: Option<NaiveDateTime>,
}

/// Insertable form of a proposal (no `id` — the database assigns it).
///
/// `cid` is optional here because a proposal is constructed before its
/// content has been uploaded to IPFS; see `Proposal::with_cid`.
#[derive(Insertable, Clone, Serialize, Deserialize)]
#[diesel(table_name = proposals)]
#[diesel(check_for_backend(diesel::pg::Pg))]
#[serde(rename_all = "camelCase")]
pub struct Proposal {
    /// Proposal title.
    pub name: String,
    /// IPFS content identifier; `None` until the content is uploaded.
    pub cid: Option<String>,
    /// Optional short summary of the proposal.
    pub summary: Option<String>,
    /// Identifier of the creating user — presumably a Stellar address;
    /// TODO confirm against the users table.
    pub creator: String,
    /// Whether this row is the current revision of the proposal.
    pub is_current: bool,
    /// CID of the revision this proposal supersedes, if any.
    pub previous_cid: Option<String>,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}

impl Proposal {
    /// Create a new proposal with no CID yet; `is_current` starts false.
    ///
    /// Timestamps are stored as naive UTC, matching `Amendment::new` — the
    /// original used naive *local* time here, which mixed timezone bases
    /// between the two tables. A single clock reading also keeps
    /// `created_at == updated_at` on a fresh record.
    pub fn new(name: String, summary: Option<String>, creator: String) -> Self {
        let now = chrono::Utc::now().naive_utc();
        Self {
            name,
            cid: None,
            creator,
            summary,
            is_current: false,
            previous_cid: None,
            created_at: now,
            updated_at: now,
        }
    }

    /// Attach the IPFS CID produced when the proposal content was stored.
    pub fn with_cid(mut self, cid: String) -> Self {
        self.cid = Some(cid);
        self
    }

    /// Set or replace the short summary.
    pub fn with_summary(mut self, summary: String) -> Self {
        self.summary = Some(summary);
        self
    }

    /// Record the CID of the revision this proposal supersedes.
    pub fn with_previous_cid(mut self, previous_cid: String) -> Self {
        self.previous_cid = Some(previous_cid);
        self
    }

    /// Flag this proposal as the current revision and bump `updated_at` (UTC).
    pub fn mark_as_current(mut self) -> Self {
        self.is_current = true;
        self.updated_at = chrono::Utc::now().naive_utc();
        self
    }
}

impl From<serde_json::Error> for ProposalError {
fn from(e: serde_json::Error) -> Self {
ProposalError::SerializationError(e)
}
}

impl From<std::io::Error> for ProposalError {
fn from(e: std::io::Error) -> Self {
ProposalError::IpfsError(Box::new(e))
}
}

+ 85
- 0
src/utils/auth.rs View File

@@ -0,0 +1,85 @@
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD as BASE64;
use ed25519_dalek::{Signature, VerifyingKey, Verifier};
use serde::{Deserialize, Serialize};
use stellar_strkey::ed25519::PublicKey as StellarPublicKey;
use std::env;
use chrono::{Utc, Duration};
use jsonwebtoken::{encode, Header, EncodingKey};

/// Verify a Freighter-provided ed25519 signature over `message` using a
/// Stellar G... address as the public key.
///
/// Returns `Ok(())` when the signature checks out; otherwise `Err` with a
/// human-readable reason suitable for an API response.
pub fn verify_freighter_signature(stellar_address: &str, message: &str, signature_b64: &str) -> Result<(), String> {
    // The Stellar G... strkey encodes the raw 32-byte ed25519 public key.
    let public_key = StellarPublicKey::from_string(stellar_address)
        .map_err(|e| format!("Invalid stellarAddress: {}", e))?;

    let verifying_key = VerifyingKey::from_bytes(&public_key.0)
        .map_err(|e| format!("Invalid public key: {}", e))?;

    // The frontend sends the signature base64-encoded.
    let decoded_sig = BASE64.decode(signature_b64)
        .map_err(|e| format!("Invalid signature encoding: {}", e))?;
    let signature = Signature::from_slice(&decoded_sig)
        .map_err(|e| format!("Invalid signature: {}", e))?;

    verifying_key
        .verify(message.as_bytes(), &signature)
        .map_err(|_| "Signature verification failed".to_string())
}

/// Generate a cryptographically-random nonce for Freighter to sign on the frontend.
///
/// The nonce is a UUID v4 string (e.g. 550e8400-e29b-41d4-a716-446655440000),
/// carrying 122 bits of randomness — plenty for a one-shot challenge.
/// Bind it to a short-lived server-side challenge (with an expiry) and verify
/// the signed nonce/message via `verify_freighter_signature` before issuing JWTs.
pub fn generate_freighter_nonce() -> String {
    let nonce = uuid::Uuid::new_v4();
    nonce.to_string()
}

/// JWT claim set embedded in tokens issued by `generate_jwt`.
///
/// `rename_all = "camelCase"` affects the multi-word custom claims
/// (`userId`, `fullName`, `stellarAddress`); `exp`/`iat` are single words
/// and keep their standard JWT names.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Claims {
    // Standard-like claims
    // Expiry, seconds since Unix epoch.
    exp: i64,
    // Issued-at, seconds since Unix epoch.
    iat: i64,
    // Custom claims
    user_id: i32,
    full_name: String,
    username: String,
    stellar_address: String,
}

/// Generate a JWT embedding the user's id, full name, username, and stellar address.
///
/// The signing secret is read from the `JWT_SECRET` environment variable.
/// The token is signed with HS256 (`Header::default()`) and expires after
/// `expiration_minutes` (default: 60 minutes).
pub fn generate_jwt(
    user_id: i32,
    full_name: &str,
    username: &str,
    stellar_address: &str,
    expiration_minutes: Option<i64>,
) -> Result<String, String> {
    let secret = match env::var("JWT_SECRET") {
        Ok(value) => value,
        Err(_) => return Err("JWT_SECRET not set".to_string()),
    };

    let issued_at = Utc::now();
    let expires_at = issued_at + Duration::minutes(expiration_minutes.unwrap_or(60));

    let claims = Claims {
        exp: expires_at.timestamp(),
        iat: issued_at.timestamp(),
        user_id,
        full_name: full_name.to_owned(),
        username: username.to_owned(),
        stellar_address: stellar_address.to_owned(),
    };

    let signing_key = EncodingKey::from_secret(secret.as_bytes());
    encode(&Header::default(), &claims, &signing_key)
        .map_err(|e| format!("Failed to sign JWT: {}", e))
}

+ 7
- 0
src/utils/content.rs View File

@@ -0,0 +1,7 @@
/// Return the first line of `content` as its summary.
///
/// Uses `str::lines`, which recognizes both `\n` and `\r\n` line endings —
/// the original `split('\n')` left a trailing `\r` on CRLF content.
/// Empty input yields an empty string.
pub fn extract_summary(content: &str) -> String {
    content
        .lines()
        .next()
        .unwrap_or_default()
        .to_string()
}

+ 16
- 0
src/utils/env.rs View File

@@ -0,0 +1,16 @@
/// Utilities for loading environment variables from a local .env file using dotenvy.
///
/// Call this early in your application (e.g., at the start of main) so that
/// subsequent `std::env::var` calls can read variables defined in .env.
pub fn load_env() {
// Try to load from default .env path. It's common to ignore a missing file
// so that environments that rely on real environment variables (e.g., prod)
// don't fail to start.
if let Err(err) = dotenvy::dotenv() {
// If the file is not found, silently continue; otherwise print a warning.
// This keeps behavior non-fatal while still surfacing unexpected errors.
if !matches!(err, dotenvy::Error::Io(ref io_err) if io_err.kind() == std::io::ErrorKind::NotFound) {
eprintln!("Warning: failed to load .env: {err}");
}
}
}

+ 148
- 0
src/utils/ipfs.rs View File

@@ -0,0 +1,148 @@
use ipfs_api_backend_actix::{IpfsApi, IpfsClient};
use serde::{de::DeserializeOwned, Serialize};
use std::io::{Cursor, Error as IoError, ErrorKind};
use std::path::Path;

use crate::types::ipfs::IpfsResult;

/// Build a unique file path under `base_dir`, using a random UUID v4 as the
/// file stem and `extension` as the suffix.
pub fn create_file_path(base_dir: &str, extension: &str) -> String {
    Path::new(base_dir)
        .join(format!("{}.{}", uuid::Uuid::new_v4(), extension))
        .to_string_lossy()
        .into_owned()
}

/// Ensure `dir` exists in the IPFS MFS (`parents = true` gives mkdir -p
/// semantics, so an already-existing directory is not an error).
pub async fn create_storage_directory<E>(client: &IpfsClient, dir: &str) -> IpfsResult<(), E>
where
    E: From<IoError>,
{
    match client.files_mkdir(dir, true).await {
        Ok(_) => Ok(()),
        Err(e) => Err(E::from(IoError::new(
            ErrorKind::Other,
            format!("IPFS mkdir error: {}", e),
        ))),
    }
}

/// Serialize `data` as JSON and write it to `path` in the IPFS MFS,
/// creating the file and any missing parents (`create = true`,
/// `parents = true` on `files_write`).
///
/// # Errors
/// Returns `E` from a serialization failure, or from an IPFS write failure
/// (wrapped in an `std::io::Error`).
pub async fn save_json_file<T, E>(
    client: &IpfsClient,
    path: &str,
    data: &T,
) -> IpfsResult<(), E>
where
    T: Serialize,
    E: From<serde_json::Error> + From<IoError>,
{
    // to_vec serializes straight to bytes, skipping the intermediate
    // String + into_bytes of the original.
    let json_bytes = serde_json::to_vec(data)?;
    let file_content = Cursor::new(json_bytes);

    client
        .files_write(path, true, true, file_content)
        .await
        .map_err(|e| E::from(IoError::new(ErrorKind::Other, format!("IPFS write error: {}", e))))?;
    Ok(())
}

/// Stream `hash` from IPFS via `cat`, enforcing a `max_size` byte cap, and
/// deserialize the collected bytes as JSON into `T`.
///
/// # Errors
/// Returns `E` when a chunk read fails, the accumulated content exceeds
/// `max_size`, the response is empty, or JSON deserialization fails.
pub async fn read_json_via_cat<T, E>(
    client: &IpfsClient,
    hash: &str,
    max_size: usize,
) -> IpfsResult<T, E>
where
    T: DeserializeOwned,
    E: From<serde_json::Error> + From<IoError>,
{
    let stream = client.cat(hash);
    let mut content = Vec::with_capacity(1024);
    let mut total_size = 0;

    // The stream must be pinned before it can be polled with `next`.
    futures::pin_mut!(stream);

    while let Some(chunk) = futures::StreamExt::next(&mut stream).await {
        let chunk = chunk.map_err(|e| E::from(IoError::new(
            ErrorKind::Other,
            format!("Failed to read IPFS chunk: {}", e),
        )))?;

        // Enforce the size cap before buffering the chunk so an oversized
        // file cannot exhaust memory.
        total_size += chunk.len();
        if total_size > max_size {
            return Err(E::from(IoError::new(
                ErrorKind::Other,
                "File exceeds maximum allowed size",
            )));
        }

        content.extend_from_slice(&chunk);
    }

    // An empty body is treated as an error rather than handed to serde,
    // which would report a less helpful EOF parse failure.
    if content.is_empty() {
        return Err(E::from(IoError::new(
            ErrorKind::Other,
            "Empty response from IPFS",
        )));
    }

    let value = serde_json::from_slice::<T>(&content)?;
    Ok(value)
}

/// Stat `path` in the IPFS MFS and return its content hash (CID).
pub async fn retrieve_content_hash<E>(client: &IpfsClient, path: &str) -> IpfsResult<String, E>
where
    E: From<IoError>,
{
    match client.files_stat(path).await {
        Ok(stat) => Ok(stat.hash),
        Err(e) => Err(E::from(IoError::new(
            ErrorKind::Other,
            format!("IPFS stat error: {}", e),
        ))),
    }
}

/// Default size cap (10 MiB) for JSON documents read via `read_json_via_cat`.
pub const DEFAULT_MAX_JSON_SIZE: usize = 10 * 1024 * 1024;

/// List the child entries of an IPFS directory given its CID/hash and
/// return the CIDs of child files.
///
/// Uses `ls`, which accepts CIDs (not only MFS paths). Every link is
/// returned regardless of its name: JSON files in IPFS do not reliably
/// carry a `.json` extension, so filtering here would drop valid entries —
/// callers validate by attempting to parse each file.
pub async fn list_directory_file_hashes<E>(client: &IpfsClient, dir_hash: &str) -> IpfsResult<Vec<String>, E>
where
    E: From<IoError>,
{
    let resp = client
        .ls(dir_hash)
        .await
        .map_err(|e| E::from(IoError::new(
            ErrorKind::Other,
            format!("IPFS ls error: {}", e),
        )))?;

    // Collect the hash of every link under every returned object.
    // (The original branched on a `.json` extension but pushed the hash in
    // both arms — the redundant conditional is removed.)
    let file_hashes = resp
        .objects
        .into_iter()
        .flat_map(|obj| obj.links)
        .map(|link| link.hash)
        .collect();

    Ok(file_hashes)
}

/// Serialize `data`, store it as a new uniquely-named file under
/// `storage_dir` in the IPFS MFS, and return the resulting content hash.
pub async fn upload_json_and_get_hash<T, E>(
    client: &IpfsClient,
    storage_dir: &str,
    file_extension: &str,
    data: &T,
) -> IpfsResult<String, E>
where
    T: Serialize,
    E: From<serde_json::Error> + From<IoError>,
{
    // Make sure the target directory exists before writing into it.
    create_storage_directory::<E>(client, storage_dir).await?;
    let destination = create_file_path(storage_dir, file_extension);
    save_json_file::<T, E>(client, &destination, data).await?;
    retrieve_content_hash::<E>(client, &destination).await
}

+ 4
- 0
src/utils/mod.rs View File

@@ -0,0 +1,4 @@
pub(crate) mod content;
pub(crate) mod ipfs;
pub(crate) mod auth;
pub(crate) mod env;

Loading…
Cancel
Save