Browse Source

Initial commit

Signed-off-by: Magic_RB <magic_rb@redalder.org>
master
Magic_RB 1 year ago
commit
b046b08550
Signed by: Magic_RB
GPG Key ID: 08D5287CC5DDCA0E
  1. 91
      .gitignore
  2. 21
      Cargo.toml
  3. 5
      diesel.toml
  4. 24
      flake.lock
  5. 24
      flake.nix
  6. 3
      migrations/2021-06-05-130637_create_paths/down.sql
  7. 18
      migrations/2021-06-05-130637_create_paths/up.sql
  8. 3
      migrations/2021-06-05-133459_path_references/down.sql
  9. 17
      migrations/2021-06-05-133459_path_references/up.sql
  10. 0
      src/actors/database_producer.rs
  11. 3
      src/actors/mod.rs
  12. 25
      src/actors/upload_consumer.rs
  13. 37
      src/actors/upload_dispatcher.rs
  14. 34
      src/api/mod.rs
  15. 165
      src/main.rs
  16. 24
      src/messages.rs
  17. 29
      src/schema.rs
  18. 50
      src/types/hash.rs
  19. 50
      src/types/mod.rs
  20. 92
      src/types/path.rs
  21. 91
      src/types/store_path.rs
  22. 88
      src/types/upload_id.rs
  23. 16
      test.bash

91
.gitignore vendored

@ -0,0 +1,91 @@
# Created by https://www.toptal.com/developers/gitignore/api/emacs,vim,rust,direnv
# Edit at https://www.toptal.com/developers/gitignore?templates=emacs,vim,rust,direnv
### direnv ###
.direnv
.envrc
### Emacs ###
# -*- mode: gitignore; -*-
*~
\#*\#
/.emacs.desktop
/.emacs.desktop.lock
*.elc
auto-save-list
tramp
.\#*
# Org-mode
.org-id-locations
*_archive
ltximg/**
# flymake-mode
*_flymake.*
# eshell files
/eshell/history
/eshell/lastdir
# elpa packages
/elpa/
# reftex files
*.rel
# AUCTeX auto folder
/auto/
# cask packages
.cask/
dist/
# Flycheck
flycheck_*.el
# server auth directory
/server/
# projectiles files
.projectile
# directory configuration
.dir-locals.el
# network security
/network-security.data
### Rust ###
# Generated by Cargo
# will have compiled files and executables
/target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
### Vim ###
# Swap
[._]*.s[a-v][a-z]
!*.svg # comment out if you don't need vector files
[._]*.sw[a-p]
[._]s[a-rt-v][a-z]
[._]ss[a-gi-z]
[._]sw[a-p]
# Session
Session.vim
Sessionx.vim
# Temporary
.netrwhist
# Auto-generated tag files
tags
# Persistent undo
[._]*.un~
# End of https://www.toptal.com/developers/gitignore/api/emacs,vim,rust,direnv
result/

21
Cargo.toml

@ -0,0 +1,21 @@
[package]
name = "nix-cache"
version = "0.1.0"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
# Parser combinators — not referenced in the visible sources; TODO confirm use.
nom = { version = "6.1.2" }
# (De)serialization of the HTTP API request/response types.
serde = { version = "1.0.126", features = [ "derive" ] }
# TODO confirm: `json` does not appear in the visible sources.
json = { version = "0.12.4" }
# SQLite-backed metadata store (see src/schema.rs and migrations/).
diesel = { version = "1.4.4", features = [ "sqlite" ] }
# Random upload-id generation (src/types/upload_id.rs).
rand = { version = "0.8.3" }
# HTTP server + actor framework and its runtime.
actix-web = { version = "4.0.0-beta.6" }
actix-rt = { version = "2.2" }
actix = { version = "0.11.1" }
tokio = { version = "1.3" }
futures-util = { version = "0.3.15" }
# TODO confirm: `ipnet` does not appear in the visible sources.
ipnet = { version = "2.3.0" }

5
diesel.toml

@ -0,0 +1,5 @@
# For documentation on how to configure this file,
# see diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"

24
flake.lock

@ -0,0 +1,24 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1622056361,
"narHash": "sha256-W4BhdhJVDBn2b3JMn6eoWzb6gbRUP8F34LtRVJTqqH8=",
"path": "/nix/store/p3xi4z6sahijy8iv7mlbhk6by2i5s390-source",
"rev": "05f3800b80f159ee5ef0eccd8e31a645e6723feb",
"type": "path"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

24
flake.nix

@ -0,0 +1,24 @@
{
  inputs = {
    # Resolved through the flake registry (pinned in flake.lock).
    nixpkgs.url = "nixpkgs";
  };

  outputs = { nixpkgs, ... }:
    let
      supportedSystems = [ "x86_64-linux" "i686-linux" "aarch64-linux" ];
      # Build an attrset { <system> = f system; } over the given systems.
      forEachSystem' = systems: f: nixpkgs.lib.genAttrs systems (system: f system);
      forEachSystem = forEachSystem' supportedSystems;
    in {
      # Development shell with the tooling needed for diesel + sqlite work.
      devShell = forEachSystem (system:
        let
          pkgs = import nixpkgs { inherit system; };
        in
        pkgs.mkShell {
          nativeBuildInputs = with pkgs; [
            sqlite.dev
            diesel-cli
          ];
        }
      );
    };
}

3
migrations/2021-06-05-130637_create_paths/down.sql

@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
-- Reverts the 2021-06-05-130637_create_paths migration.
DROP TABLE paths

18
migrations/2021-06-05-130637_create_paths/up.sql

@ -0,0 +1,18 @@
-- Your SQL goes here

-- One row per cached store path, mirroring narinfo-style metadata
-- (see the sample row inserted in src/main.rs).
CREATE TABLE paths (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    -- Base-32 store hash. NOTE(review): the sample hashes are 32 chars
    -- but the column allows 33 — confirm which is intended.
    hash VARCHAR(33) NOT NULL,
    store_path TEXT NOT NULL,
    -- Relative URL of the NAR archive, e.g. nar/<filehash>.nar.xz.
    url TEXT NOT NULL,
    -- Presumably: whether the NAR is stored by this cache — TODO confirm.
    local BOOLEAN NOT NULL,
    deriver TEXT NOT NULL,
    sigs TEXT NOT NULL, -- space-separated
    -- Content-address, when the path is content-addressed; NULL otherwise.
    ca TEXT,
    file_hash TEXT NOT NULL,
    file_size INTEGER NOT NULL,
    compression TEXT NOT NULL,
    nar_hash TEXT NOT NULL,
    nar_size INTEGER NOT NULL
)

3
migrations/2021-06-05-133459_path_references/down.sql

@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
-- Reverts the 2021-06-05-133459_path_references migration.
DROP TABLE path_references

17
migrations/2021-06-05-133459_path_references/up.sql

@ -0,0 +1,17 @@
-- Your SQL goes here

-- Many-to-many reference edges between rows of `paths`.
CREATE TABLE path_references (
    -- The path that holds the reference.
    referrer INTEGER NOT NULL,
    -- The path being referenced.
    reference INTEGER NOT NULL,
    PRIMARY KEY (referrer, reference),
    FOREIGN KEY (referrer)
        REFERENCES paths (id)
        ON UPDATE RESTRICT
        -- Deleting a path discards its outgoing reference edges...
        ON DELETE CASCADE,
    FOREIGN KEY (reference)
        REFERENCES paths (id)
        ON UPDATE RESTRICT
        -- ...but a path cannot be deleted while others still reference it.
        ON DELETE RESTRICT
)

0
src/actors/database_producer.rs

3
src/actors/mod.rs

@ -0,0 +1,3 @@
// Actor implementations for the upload pipeline.
pub mod database_producer; // currently an empty file — placeholder for DB writes
pub mod upload_consumer;
pub mod upload_dispatcher;

25
src/actors/upload_consumer.rs

@ -0,0 +1,25 @@
use actix::{Actor, Addr, Context, Handler, Response};
use crate::messages::UploadNar;

/// Actor that receives the NAR payload for a single upload.
pub struct UploadConsumer {}

impl Actor for UploadConsumer {
    type Context = Context<Self>;
}

impl UploadConsumer {
    /// Start a new consumer actor and return its address.
    pub fn new() -> Addr<Self> {
        // The context is not needed to build this stateless actor;
        // `_ctx` silences the unused-variable warning.
        Self::create(|_ctx| Self {})
    }
}

impl Handler<UploadNar> for UploadConsumer {
    type Result = ();

    // NOTE(review): the NAR message is currently dropped — upload
    // handling is not implemented yet.
    fn handle(&mut self, _msg: UploadNar, _ctx: &mut Self::Context) -> Self::Result {}
}

37
src/actors/upload_dispatcher.rs

@ -0,0 +1,37 @@
use std::collections::HashMap;
use actix::{Actor, Addr, Context, Handler, Recipient, Response};
use crate::{messages::{BeginUpload, UploadNar}, types::UploadId};
use super::upload_consumer::UploadConsumer;

/// Actor that hands out upload ids and routes NAR messages to a
/// per-upload `UploadConsumer`.
pub struct UploadDispatcher {
    // One consumer per active upload id.
    // NOTE(review): entries are never removed, so finished or abandoned
    // uploads accumulate — consider evicting on completion/timeout.
    map: HashMap<UploadId, Recipient<UploadNar>>
}

impl Actor for UploadDispatcher {
    type Context = Context<Self>;
}

impl UploadDispatcher {
    /// Start the dispatcher with an empty routing table and return its address.
    pub fn new() -> Addr<Self> {
        Self::create(|_ctx| Self {
            map: HashMap::new()
        })
    }
}

impl Handler<BeginUpload> for UploadDispatcher {
    type Result = Response<UploadId>;

    /// Allocate a fresh upload id, spawn a consumer for it, and reply
    /// with the id the client must use for the subsequent NAR upload.
    fn handle(&mut self, _msg: BeginUpload, _ctx: &mut Self::Context) -> Self::Result {
        let upload_id = UploadId::new();
        self.map.insert(upload_id.clone(), UploadConsumer::new().recipient());
        Response::reply(upload_id)
    }
}

34
src/api/mod.rs

@ -0,0 +1,34 @@
pub mod upload {
    use crate::{messages::BeginUpload, types::{Path, StorePath}};
    use actix::Recipient;
    use actix_web::{HttpResponse, post, web::{Data, Json}};
    use serde::{Deserialize, Serialize};

    /// JSON body of an upload request: narinfo-style metadata for the
    /// path about to be uploaded (see test.bash for a sample payload).
    /// NOTE(review): none of these fields are forwarded yet —
    /// `BeginUpload` is sent empty below.
    #[derive(Deserialize)]
    pub struct Request {
        store_path: StorePath,
        nar_hash: String,
        nar_size: u32,
        references: Vec<Path>,
        deriver: Path,
        sig: Vec<String>
    }

    /// JSON reply: either where to upload the NAR, or a conflict.
    #[derive(Serialize)]
    pub enum Response {
        Ok {
            upload_path: String,
        },
        AlreadyExists
    }

    /// Begin an upload: ask the dispatcher for a fresh upload id and
    /// return it as the upload path.
    // was `#[post("api_v1/upload")]` — actix-web matches route patterns
    // against the full request path, which always starts with '/', so the
    // leading slash is required for test.bash's /api_v1/upload to match.
    #[post("/api_v1/upload")]
    pub async fn endpoint(req: Json<Request>, begin_upload: Data<Recipient<BeginUpload>>) -> HttpResponse {
        let _req = req.into_inner();
        match begin_upload.send(BeginUpload {}).await {
            Ok(o) => HttpResponse::Ok().json(Response::Ok { upload_path: o.to_string() }),
            // NOTE(review): a mailbox/delivery error is not really
            // "already exists" — a 500 would be more accurate here.
            Err(_e) => HttpResponse::Conflict().json(Response::AlreadyExists),
        }
    }
}

165
src/main.rs

@ -0,0 +1,165 @@
#[macro_use]
extern crate diesel;
use actix::Recipient;
use actix_web::{HttpResponse, web};
use diesel::{EqAll, QueryDsl, RunQueryDsl, connection::Connection, sqlite::SqliteConnection};
use std::{convert::{TryFrom, TryInto}, error::Error, path::{Path as StdPath, PathBuf}};
mod schema;
mod api;
mod types;
mod messages;
mod actors;
use schema::paths;
use crate::messages::BeginUpload;
/// In-memory representation of a cached store path (narinfo-style
/// metadata). `sigs` is a real list here; it is flattened to one
/// space-separated TEXT column in the database.
#[derive(Debug)]
struct Path {
    pub hash: String,
    pub store_path: PathBuf,
    pub url: String,
    // Presumably: whether the NAR is stored locally — TODO confirm semantics.
    pub local: bool,
    pub deriver: PathBuf,
    pub sigs: Vec<String>,
    pub ca: Option<String>,
    pub file_hash: String,
    pub file_size: u32,
    pub compression: String,
    pub nar_hash: String,
    pub nar_size: u32,
}

/// Row shape produced by diesel queries; field order must match the
/// column order of `paths` in src/schema.rs.
#[derive(Queryable)]
struct QueryPath {
    // Auto-increment primary key; unused after loading.
    _id: i32,
    pub hash: String,
    pub store_path: String,
    pub url: String,
    pub local: bool,
    pub deriver: String,
    pub sigs: String, // space-separated; split apart in From<QueryPath>
    pub ca: Option<String>,
    pub file_hash: String,
    pub file_size: i32, // SQLite Integer is signed; widened to u32 in From
    pub compression: String,
    pub nar_hash: String,
    pub nar_size: i32,
}
/// Convert a database row into the richer in-memory representation,
/// splitting the space-separated signature column and widening the
/// signed SQLite integers back to unsigned.
// was `impl<>` — the empty generic parameter list is legal but
// unidiomatic noise.
impl From<QueryPath> for Path {
    fn from(path: QueryPath) -> Self {
        Path {
            hash: path.hash,
            store_path: PathBuf::from(&path.store_path),
            url: path.url,
            local: path.local,
            deriver: PathBuf::from(&path.deriver),
            // Stored as one space-separated TEXT column (see migrations).
            sigs: path.sigs.split(' ').map(|s| s.to_string()).collect(),
            ca: path.ca,
            file_hash: path.file_hash,
            // `as u32` reinterprets; negative values would wrap —
            // assumes sizes are always non-negative. TODO confirm.
            file_size: path.file_size as u32,
            compression: path.compression,
            nar_hash: path.nar_hash,
            nar_size: path.nar_size as u32,
        }
    }
}
/// Borrowed row shape for inserts into `paths`; integers are narrowed
/// to i32 because SQLite's Integer column type is signed.
#[derive(Insertable)]
#[table_name = "paths"]
struct InsertPath<'a> {
    pub hash: &'a str,
    pub store_path: &'a str,
    pub url: &'a str,
    pub local: bool,
    pub deriver: &'a str,
    pub sigs: String, // owned: built by joining Vec<String> with spaces
    pub ca: Option<&'a str>,
    pub file_hash: &'a str,
    pub file_size: i32,
    pub compression: &'a str,
    pub nar_hash: &'a str,
    pub nar_size: i32,
}
/// Borrow a `Path` as an insertable row. Fails when a `PathBuf` field
/// contains non-UTF-8 data, since the database stores paths as TEXT.
impl<'a> TryFrom<&'a Path> for InsertPath<'a> {
    type Error = &'a str;

    fn try_from(path: &'a Path) -> Result<Self, Self::Error> {
        Ok(InsertPath {
            hash: &path.hash,
            // was `ok_or("")` — an empty error message made failures
            // impossible to diagnose at the call site.
            store_path: path.store_path.as_os_str().to_str().ok_or("store_path is not valid UTF-8")?,
            url: &path.url,
            local: path.local,
            deriver: path.deriver.as_os_str().to_str().ok_or("deriver is not valid UTF-8")?,
            sigs: path.sigs.join(" "),
            ca: path.ca.as_ref().map(|s| s.as_str()),
            file_hash: &path.file_hash,
            // Narrowing cast for SQLite's signed Integer; assumes sizes
            // fit in i32 — TODO confirm.
            file_size: path.file_size as i32,
            compression: &path.compression,
            nar_hash: &path.nar_hash,
            nar_size: path.nar_size as i32,
        })
    }
}
#[actix_rt::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // Open (or create) the local metadata database.
    let conn = SqliteConnection::establish("sqlite3.db")?;
    // Hard-coded sample row (libidn2) used to smoke-test the insert and
    // query paths below.
    // NOTE(review): this inserts a duplicate row on every start — there
    // is no uniqueness constraint on `hash`.
    let path = Path {
        hash: "dzyimsdk9yq7x6g24r79ipg3vbalyyy1".into(),
        store_path: PathBuf::from("/nix/store/dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2-2.3.1"),
        url: "nar/1qcyd8cf4xdj1i1bfnmchxsb0dr380wc3sh3b8jbb4v0y6bcrlal.nar.xz".into(),
        local: false,
        deriver: PathBuf::from(""),
        sigs: vec![ "cache.nixos.org-1:LuHqfckGdiPXBgpc1KYl49TCqHBjg85lFuJGb8UL93Z7OMc2Tl2+8MC081CWZ2lBx4ZkN0rc1jT21uInH0rlBw==".into() ],
        ca: None,
        file_hash: "sha256:1qcyd8cf4xdj1i1bfnmchxsb0dr380wc3sh3b8jbb4v0y6bcrlal".into(),
        file_size: 59196,
        compression: "xz".into(),
        nar_hash: "sha256:1rizfnla4lyjls0d6dpf195r5xm6mz1z34xg64pnirrdrlsqrksa".into(),
        nar_size: 240792,
    };
    diesel::insert_into(paths::table)
        .values::<InsertPath>((&path).try_into()?)
        .execute(&conn)?;
    // Read the row(s) back by hash and print them as a sanity check.
    let path: Vec<Path> = paths::dsl::paths.filter(paths::dsl::hash.eq_all("dzyimsdk9yq7x6g24r79ipg3vbalyyy1"))
        .limit(5)
        .load::<QueryPath>(&conn)?.into_iter().map(|path| path.into()).collect();
    println!("{:#?}", path);
    // One dispatcher, shared (as a Recipient) with all HTTP workers.
    let upload_dispatcher = actors::upload_dispatcher::UploadDispatcher::new();
    {
        let upload_dispatcher = web::Data::new(upload_dispatcher.clone().recipient());
        actix_web::HttpServer::new(move || {
            actix_web::App::new()
                .service(api::upload::endpoint)
                .app_data(upload_dispatcher.clone())
        }).bind("localhost:8099").map(|server| server.run())?.await?;
    }
    // NOTE(review): the `.await` above only returns once the server has
    // already shut down, so waiting for ctrl-c here looks redundant —
    // confirm intent.
    actix_rt::signal::ctrl_c().await.unwrap();
    actix_rt::System::current().stop();
    Ok(())
}

24
src/messages.rs

@ -0,0 +1,24 @@
use actix::Message;
use crate::types::{Path, UploadId, StorePath};

/// Carries NAR data to an `UploadConsumer`.
/// NOTE(review): payload fields are not defined yet.
#[derive(Message)]
#[rtype(result = "()")]
pub struct UploadNar {}

/// Ask the dispatcher to start a new upload; the reply is the allocated id.
#[derive(Message)]
#[rtype(result = "UploadId")]
pub struct BeginUpload {}

/// Persist a store path's metadata.
/// NOTE(review): the fields are not read anywhere in the visible sources.
#[derive(Message)]
#[rtype(result = "()")]
pub struct WritePath {
    store_path: StorePath,
    nar_hash: String,
    nar_size: u32,
    references: Vec<Path>,
    deriver: StorePath,
    sig: Vec<String>,
}

29
src/schema.rs

@ -0,0 +1,29 @@
// Diesel schema, kept at src/schema.rs per diesel.toml's [print_schema].
// Column order must match the migrations and the `Queryable` structs.

table! {
    path_references (referrer, reference) {
        referrer -> Integer,
        reference -> Integer,
    }
}

table! {
    paths (id) {
        id -> Integer,
        hash -> Text,
        store_path -> Text,
        url -> Text,
        local -> Bool,
        deriver -> Text,
        sigs -> Text, // space-separated list, joined/split in main.rs
        ca -> Nullable<Text>,
        file_hash -> Text,
        file_size -> Integer,
        compression -> Text,
        nar_hash -> Text,
        nar_size -> Integer,
    }
}

allow_tables_to_appear_in_same_query!(
    path_references,
    paths,
);

50
src/types/hash.rs

@ -0,0 +1,50 @@
use core::fmt;
use std::fmt::Display;
use serde::{Deserialize, Serialize};
use super::Path;

/// A bare nix store hash: lowercase `[a-z0-9]` only (validated by
/// `Hash::parse`).
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct Hash(String);

/// Discard the name/version of a `Path`, keeping only its hash.
impl From<Path> for Hash {
    fn from(path: Path) -> Self {
        path.hash
    }
}
impl Display for Hash {
    /// Writes the raw hash string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl fmt::Debug for Hash {
    /// Debug output is identical to Display.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

impl Hash {
    /// Validate and wrap a hash string; every character must be in
    /// `[a-z0-9]`. On failure, returns the index and value of the first
    /// offending character.
    pub fn parse<S: AsRef<str>>(str: S) -> Result<Self, (usize, char)> {
        let str = str.as_ref();
        // Locate the first character outside the allowed alphabet, if any.
        match str
            .chars()
            .enumerate()
            .find(|&(_, c)| !matches!(c, 'a'..='z' | '0'..='9'))
        {
            Some(offender) => Err(offender),
            None => Ok(Hash(str.into())),
        }
    }

    /// Consume the wrapper and return the inner string.
    pub fn to_string(self) -> String {
        self.0
    }
}

50
src/types/mod.rs

@ -0,0 +1,50 @@
// Domain types shared across the crate, re-exported flat.
mod store_path;
pub use store_path::StorePath;
mod path;
pub use path::Path;
mod hash;
pub use hash::Hash;
mod upload_id;
pub use upload_id::UploadId;

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn hash() {
        // Only [a-z0-9] is accepted; errors carry (index, offending char).
        assert!(Hash::parse("dzyimsdk9yq7x6g24r79ipg3vbalyyy1").is_ok());
        assert_eq!(Hash::parse("dzyimsdk9yq7$6g24r79ipg3vbalyyy1"), Err((12, '$')));
        assert_eq!(Hash::parse("dzyimsdk9yq7x6g24r79ipg3vbalyyy1").unwrap().to_string(), "dzyimsdk9yq7x6g24r79ipg3vbalyyy1");
    }

    #[test]
    fn path() {
        // Exactly three dash-separated parts: hash-name-version.
        assert!(Path::parse("dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2-2.3.1").is_some());
        assert!(Path::parse("dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2=2.3.1").is_none());
        assert!(Path::parse("dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn22.3.1").is_none());
        assert!(Path::parse("dzyimsdk9yq7#6g24r79ipg3vbalyyy1-libidn2-2.3.1").is_none());
    }

    #[test]
    fn store_path() {
        // Everything before the last '/' is the store prefix.
        assert!(StorePath::parse("/nix/store/dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2-2.3.1").is_some());
        assert!(StorePath::parse("/nix/store/dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2=2.3.1").is_none());
        assert!(StorePath::parse("/nix/store/dzyimsdk9yq7$6g24r79ipg3vbalyyy1-libidn2-2.3.1").is_none());
        assert!(StorePath::parse("/nix/store/dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn22.3.1").is_none());
        assert!(StorePath::parse("/nix/store").is_none());
        assert!(StorePath::parse("dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2-2.3.1").is_none());
    }

    #[test]
    fn upload_id() {
        // Five dash-separated groups of exactly five [a-z0-9] chars.
        assert!(UploadId::parse("abcde-01234-fghij-56789-klmno").is_some());
        assert!(UploadId::parse("abcde=01234-fghij-56789-klmno").is_none());
        assert!(UploadId::parse("abcd#-01234-fghij-56789-klmno").is_none());
        assert!(UploadId::parse("abcde-0124-fghij-56789-klmno").is_none());
        assert!(UploadId::parse("abcde-012444-fghij-56789-klmno").is_none());
    }
}

92
src/types/path.rs

@ -0,0 +1,92 @@
use core::fmt;
use std::fmt::Display;
use serde::{Deserialize, Serialize, de::Visitor};
use super::{StorePath, Hash};

/// The final component of a store path, split as `<hash>-<name>-<version>`.
#[derive(Clone, PartialEq, Eq)]
pub struct Path {
    // Visible to sibling types (e.g. `Hash::from(Path)`).
    pub(super) hash: Hash,
    name: String,
    version: String,
}

/// Drop the store prefix of a full `StorePath`.
impl From<StorePath> for Path {
    fn from(store_path: StorePath) -> Self {
        store_path.path
    }
}
impl Display for Path {
    /// Renders the canonical `<hash>-<name>-<version>` form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}-{}-{}", self.hash, self.name, self.version)
    }
}

impl fmt::Debug for Path {
    /// Delegate to Display, matching `Hash` and `UploadId`; the previous
    /// `{:?}` on the String fields printed spurious quotes around
    /// name/version.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}
/// serde visitor that parses strings into `Path`.
struct PathVisitor;

impl<'de> Visitor<'de> for PathVisitor {
    type Value = Path;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("a nix path")
    }

    // serde's default `visit_string`/`visit_borrowed_str` already forward
    // here, so the explicit forwarding overrides were removed.
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // `ok_or_else` builds the error lazily, only on the failure path.
        Self::Value::parse(v).ok_or_else(|| E::custom("Invalid path"))
    }
}

impl<'de> Deserialize<'de> for Path {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_str(PathVisitor)
    }
}

impl Serialize for Path {
    /// Serialize via the Display rendering; `collect_str` streams it
    /// without the intermediate `String` that `to_string()` allocated.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.collect_str(self)
    }
}
impl Path {
    /// Parse `<hash>-<name>-<version>`: splits on every `-` and requires
    /// exactly three pieces, so name and version must themselves be
    /// dash-free.
    /// NOTE(review): real nix names often contain extra dashes
    /// (e.g. `gcc-wrapper-10.3.0`) and would be rejected here — confirm
    /// this restriction is intended (the unit tests in types/mod.rs
    /// currently rely on it).
    pub fn parse<S: AsRef<str>>(str: S) -> Option<Self> {
        let splits = str.as_ref().split('-').collect::<Vec<_>>();
        match splits.len() {
            // The hash segment must itself validate; `?` bails to None.
            3 => Some(Self {
                hash: Hash::parse(splits[0]).ok()?,
                name: splits[1].into(),
                version: splits[2].into()
            }),
            _ => None
        }
    }
}

91
src/types/store_path.rs

@ -0,0 +1,91 @@
use core::fmt;
use std::fmt::Display;
use serde::{Deserialize, Serialize, de::Visitor};
use super::Path;

/// An absolute store path: a store prefix (e.g. `/nix/store`) plus the
/// parsed `<hash>-<name>-<version>` component.
#[derive(Clone, PartialEq, Eq)]
pub struct StorePath {
    store: String,
    // Visible to sibling types (e.g. `Path::from(StorePath)`).
    pub(super) path: Path,
}
impl Display for StorePath {
    /// Renders `<store>/<hash>-<name>-<version>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}/{}", self.store, self.path)
    }
}

impl fmt::Debug for StorePath {
    /// Delegate to Display, matching `Hash` and `UploadId`; the previous
    /// `{:?}` formatting printed spurious quotes around the store prefix.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

impl Serialize for StorePath {
    /// Serialize via Display; `collect_str` avoids the intermediate
    /// `String` allocation of `to_string()`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.collect_str(self)
    }
}
/// serde visitor that parses strings into `StorePath`.
struct StorePathVisitor;

impl<'de> Visitor<'de> for StorePathVisitor {
    type Value = StorePath;

    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("a nix store path")
    }

    // serde's default `visit_string`/`visit_borrowed_str` already forward
    // here, so the explicit forwarding overrides were removed.
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // `ok_or_else` builds the error lazily, only on the failure path.
        Self::Value::parse(v).ok_or_else(|| E::custom("Invalid store path"))
    }
}

impl<'de> Deserialize<'de> for StorePath {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_str(StorePathVisitor)
    }
}
impl StorePath {
    /// Parse an absolute store path such as
    /// `/nix/store/<hash>-<name>-<version>`: everything before the final
    /// `/` becomes the store prefix; the last component must be a valid
    /// `Path`.
    pub fn parse<S: AsRef<str>>(str: S) -> Option<Self> {
        // `rsplit_once` already yields None when there is no '/', so the
        // former `contains('/')` pre-check was redundant.
        let (store, name) = str.as_ref().rsplit_once('/')?;
        Some(Self {
            store: store.into(),
            path: Path::parse(name)?,
        })
    }
}

88
src/types/upload_id.rs

@ -0,0 +1,88 @@
use std::{convert::TryInto, fmt, fmt::Display};
use rand::{Rng, distributions::Uniform, thread_rng};

/// Random upload token rendered as `xxxxx-xxxxx-xxxxx-xxxxx-xxxxx`:
/// five groups of five characters drawn from `[a-z0-9]`.
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct UploadId {
    // One fixed-size group per dash-separated segment.
    a: [char; 5],
    b: [char; 5],
    c: [char; 5],
    d: [char; 5],
    e: [char; 5],
}
impl Display for UploadId {
    /// Formats the id as five dash-separated 5-character groups.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let groups = [&self.a, &self.b, &self.c, &self.d, &self.e]
            .iter()
            .map(|group| group.iter().collect::<String>())
            .collect::<Vec<String>>();
        f.write_str(&groups.join("-"))
    }
}

impl fmt::Debug for UploadId {
    /// Debug output is identical to Display.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Display::fmt(self, f)
    }
}
impl UploadId {
    /// Generate a random id: 25 characters sampled uniformly from
    /// `[a-z0-9]`, split into five 5-char groups.
    pub fn new() -> Self {
        const LOOKUP: [char; 36] = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
        // Sample table indices directly. The old code sampled 0..=35 but
        // matched `0..=36`, which would have indexed out of bounds had 36
        // ever been produced.
        let id: Vec<char> = thread_rng()
            .sample_iter(Uniform::new(0, LOOKUP.len()))
            .take(25)
            .map(|n| LOOKUP[n])
            .collect();
        Self {
            a: id[0..5].try_into().unwrap(),
            b: id[5..10].try_into().unwrap(),
            c: id[10..15].try_into().unwrap(),
            d: id[15..20].try_into().unwrap(),
            e: id[20..25].try_into().unwrap(),
        }
    }

    /// Parse a textual id. Returns None unless the input is exactly five
    /// dash-separated groups of five `[a-z0-9]` characters each.
    pub fn parse<S: AsRef<str>>(str: S) -> Option<Self> {
        let splits = str.as_ref().split('-').collect::<Vec<&str>>();
        if splits.len() != 5 {
            return None;
        }
        // Every group: exactly 5 chars, all from the allowed alphabet.
        let only_valid = splits.iter().all(|s| {
            s.len() == 5 && s.chars().all(|c| matches!(c, 'a'..='z' | '0'..='9'))
        });
        if !only_valid {
            return None;
        }
        // The length checks above guarantee these conversions succeed.
        Some(Self {
            a: splits[0].chars().collect::<Vec<char>>().try_into().unwrap(),
            b: splits[1].chars().collect::<Vec<char>>().try_into().unwrap(),
            c: splits[2].chars().collect::<Vec<char>>().try_into().unwrap(),
            d: splits[3].chars().collect::<Vec<char>>().try_into().unwrap(),
            e: splits[4].chars().collect::<Vec<char>>().try_into().unwrap(),
        })
    }

    /// Consume the id and render it (same text as `Display`).
    pub fn to_string(self) -> String {
        format!("{}", self)
    }
}

16
test.bash

@ -0,0 +1,16 @@
#! /bin/bash
# Smoke-test the upload endpoint: POST the narinfo-style metadata of the
# libidn2 store path (the same sample row src/main.rs inserts) to a
# locally running server on port 8099.
curl -d \
'{
"store_path": "/nix/store/dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2-2.3.1",
"nar_hash": "sha256:1rizfnla4lyjls0d6dpf195r5xm6mz1z34xg64pnirrdrlsqrksa",
"nar_size": 240792,
"references": [
"dzyimsdk9yq7x6g24r79ipg3vbalyyy1-libidn2-2.3.1",
"i1dc1ac2hxjfl59rvsj49vvgvl1nl16s-libunistring-0.9.10"
],
"sig": [
"cache.nixos.org-1:LuHqfckGdiPXBgpc1KYl49TCqHBjg85lFuJGb8UL93Z7OMc2Tl2+8MC081CWZ2lBx4ZkN0rc1jT21uInH0rlBw=="
],
"deriver": "vvikw51p1mrdw7lkqnnj16ha3612vp18-libidn2-2.3.1.drv"
}' -H 'Content-Type: application/json' http://localhost:8099/api_v1/upload
Loading…
Cancel
Save