feat(janus-sdk): add libertaria sdk v0.1.0-alpha

Add Janus language SDK for Libertaria with core modules:

- identity: cryptographic agent identity with rotation/burn
- message: signed, content-addressed messages
- context: NCP (Nexus Context Protocol) implementation
- memory: LanceDB vector store integration
- lib: unified API exports
Author: Markus Maiwald
Date: 2026-02-03 08:09:43 +01:00
Parent: 514e521abe
Commit: ccf521131a
Signed by: markus (GPG key ID: 07DDBEA3CBDC090A)
6 changed files with 765 additions and 0 deletions

janus-sdk/README.md (new file)
@@ -0,0 +1,83 @@
# Libertaria SDK for Janus
> Sovereign; Kinetic; Anti-Fragile.
The Libertaria SDK provides primitives for building sovereign agent networks on top of Janus.
**Status:** v0.1.0-alpha (2026-02-03)
## Core Modules
| Module | File | Status | Purpose |
|--------|------|--------|---------|
| `identity` | `identity.jan` | ✅ Draft | Cryptographic agent identity with rotation/burn |
| `message` | `message.jan` | ✅ Draft | Signed, content-addressed messages |
| `context` | `context.jan` | ✅ Draft | NCP (Nexus Context Protocol) implementation |
| `memory` | `memory.jan` | ✅ Draft | Vector-backed semantic memory (LanceDB) |
| `lib` | `lib.jan` | ✅ Draft | Unified API export |
## Quick Start
```janus
import libertaria
-- Create sovereign agent
let agent = libertaria.create_sovereign_agent()
-- Rotate the agent's identity (the old one expires after a grace period)
let (new_id, old_id) = identity.rotate(agent.identity)
-- Send signed message
let msg = message.create(
from = agent.identity,
content_type = Text,
content = bytes.from_string("Hello Sovereigns!")
)
-- Create hierarchical context
let ctx = context.create({})
let sub_ctx = context.fork(ctx, reason = "Sub-conversation")?
-- Store in semantic memory
let emb = memory.embed(message.content(msg))
let vs = memory.store(agent.memory, message.id(msg), emb, "...")
```
## Design Principles
1. **Exit is Voice** — Agents can leave, taking their data with them cryptographically (`identity.rotate`, `identity.burn`); see the sketch after this list
2. **Profit = Honesty** — Economic stakes align incentives (staking module planned)
3. **Code is Law** — No central moderation, only protocol rules
4. **Binary APIs** — gRPC/MsgPack/QUIC instead of REST
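A minimal sketch of principle 1, using only the `identity` API shipped in this commit (the call sites are illustrative, not part of the SDK itself):
```janus
import libertaria
-- Rotation: fresh keys with linked provenance; the old identity auto-expires after a 24h grace period
let me = identity.create()
let (fresh, retiring) = identity.rotate(me)
-- Exit is Voice: burning zeroes the secret key and revokes the identity
let burned = identity.burn(fresh)
assert not identity.is_valid(burned) "A burned identity can no longer sign"
-- Past signatures remain verifiable, since the public key is untouched
```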
## Architecture
```
┌─────────────────────────────────────────────┐
│ Libertaria SDK │
├──────────┬──────────┬──────────┬────────────┤
│ Identity │ Message │ Context │ Memory │
│ │ │ (NCP) │ (LanceDB) │
├──────────┴──────────┴──────────┴────────────┤
│ Janus Standard Library │
├─────────────────────────────────────────────┤
│ Janus Compiler (:service) │
└─────────────────────────────────────────────┘
```
## Next Steps
- [ ] Staking/Economics module (spam prevention)
- [ ] Channel module (QUIC transport)
- [ ] Discovery module (DHT-based agent lookup)
- [ ] Governance module (voting, proposals)
- [ ] Test suite
- [ ] Integration with Janus compiler
## License
MIT + Libertaria Commons Clause
*Forge burns bright. The Exit is being built.*
⚡️

context.jan (new file)
@@ -0,0 +1,170 @@
-- libertaria/context.jan
-- NCP (Nexus Context Protocol) implementation
-- Structured, hierarchical context management for agent conversations
module Context exposing
( Context
, create, fork, merge, close
, current_depth, max_depth
, add_message, get_messages
, subscribe, unsubscribe
, to_astdb_query
)
import message.{Message}
import memory.{VectorStore}
import time.{timestamp}
-- Context is a structured conversation container
type Context =
{ id: context_id.ContextId
, parent: ?context_id.ContextId -- Hierarchical nesting
, depth: int -- Nesting level (prevents infinite loops)
, created_at: timestamp.Timestamp
, messages: list.List(Message)
, metadata: metadata.ContextMetadata
, vector_store: ?VectorStore -- Semantic indexing
, subscribers: set.Set(fingerprint.Fingerprint)
, closed: bool
}
type ContextConfig =
{ max_depth: int = 100 -- Max nesting before forced flattening
, max_messages: int = 10000 -- Auto-archive older messages
, enable_vector_index: bool = true
, retention_policy: RetentionPolicy
}
type RetentionPolicy =
| Keep_Forever
| Auto_Archive_After(duration.Duration)
| Delete_After(duration.Duration)
-- Create root context
-- Top-level conversation container
fn create(config: ContextConfig) -> Context
let id = context_id.generate()
let now = timestamp.now()
let vs = if config.enable_vector_index
then some(memory.create_vector_store())
else null
{ id = id
, parent = null
, depth = 0
, created_at = now
, messages = list.empty()
, metadata = metadata.create(id)
, vector_store = vs
, subscribers = set.empty()
, closed = false
}
-- Fork child context from parent
-- Used for: sub-conversations, branching decisions, isolated experiments
fn fork(parent: Context, reason: string, config: ContextConfig) -> result.Result(Context, error.ForkError)
if parent.depth >= config.max_depth then
error.err(MaxDepthExceeded)
else if parent.closed then
error.err(ParentClosed)
else
let id = context_id.generate()
let now = timestamp.now()
let vs = if config.enable_vector_index
then some(memory.create_vector_store())
else null
let child =
{ id = id
, parent = some(parent.id)
, depth = parent.depth + 1
, created_at = now
, messages = list.empty()
, metadata = metadata.create(id)
|> metadata.set_parent(parent.id)
|> metadata.set_fork_reason(reason)
, vector_store = vs
, subscribers = set.empty()
, closed = false
}
ok(child)
-- Merge child context back into parent
-- Consolidates messages, preserves fork history
fn merge(child: Context, into parent: Context) -> result.Result(Context, error.MergeError)
if child.parent != some(parent.id) then
error.err(NotMyParent)
else if child.closed then
error.err(ChildClosed)
else
let merged_messages = parent.messages ++ child.messages
let merged_subs = set.union(parent.subscribers, child.subscribers)
let updated_parent =
{ parent with
messages = merged_messages
, subscribers = merged_subs
, metadata = parent.metadata
|> metadata.add_merge_history(child.id, child.messages.length())
}
ok(updated_parent)
-- Close context (final state)
-- No more messages, preserves history
fn close(ctx: Context) -> Context
{ ctx with closed = true }
-- Get current nesting depth
fn current_depth(ctx: Context) -> int
ctx.depth
-- Get max allowed depth (from config)
fn max_depth(ctx: Context) -> int
ctx.metadata.config.max_depth
-- Add message to context
-- Indexes in vector store if enabled
fn add_message(ctx: Context, msg: Message) -> result.Result(Context, error.AddError)
if ctx.closed then
error.err(ContextClosed)
else
let updated = { ctx with messages = ctx.messages ++ [msg] }
-- Index in vector store for semantic search
match ctx.vector_store with
| null -> ok(updated)
| some(vs) ->
let embedding = memory.embed(message.content(msg))
let indexed_vs = memory.store(vs, message.id(msg), embedding, content_hash = message.hash(msg))
ok({ updated with vector_store = some(indexed_vs) })
-- Get all messages in context
fn get_messages(ctx: Context) -> list.List(Message)
ctx.messages
-- Subscribe agent to context updates
fn subscribe(ctx: Context, agent: fingerprint.Fingerprint) -> Context
{ ctx with subscribers = set.insert(ctx.subscribers, agent) }
-- Unsubscribe agent
fn unsubscribe(ctx: Context, agent: fingerprint.Fingerprint) -> Context
{ ctx with subscribers = set.remove(ctx.subscribers, agent) }
-- Convert to ASTDB query for semantic search
-- Enables: "Find similar contexts", "What did we discuss about X?"
fn to_astdb_query(ctx: Context) -> astdb.Query
let message_hashes = list.map(ctx.messages, message.hash)
let time_range =
{ start = ctx.created_at
, end = match list.last(ctx.messages) with
| null -> timestamp.now()
| some(last_msg) -> message.timestamp(last_msg)
}
astdb.query()
|> astdb.with_context_id(ctx.id)
|> astdb.with_message_hashes(message_hashes)
|> astdb.with_time_range(time_range)
|> astdb.with_depth(ctx.depth)
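
A short lifecycle sketch against the Context API above, following the README Quick Start conventions (named arguments, `?` for result propagation); the call sites and the `into =` label are illustrative:
```janus
import libertaria
let me = identity.create()
-- Root context, then an isolated fork for a side conversation (depth-limited by config)
let root = context.create({})
let branch = context.fork(root, reason = "Isolated experiment")?
-- add_message embeds and indexes the message when a vector store is attached
let note = message.create(from = me, content_type = Text, content = bytes.from_string("branch note"))
let updated = context.add_message(branch, note)?
-- merge folds the fork's messages and subscribers back into the parent; close freezes it
let merged = context.merge(updated, into = root)?
let done = context.close(merged)
```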

identity.jan (new file)
@@ -0,0 +1,98 @@
-- libertaria/identity.jan
-- Cryptographic identity for sovereign agents
-- Exit is Voice: Identity can be rotated, expired, or burned
module Identity exposing
( Identity
, create, rotate, burn
, is_valid, is_expired
, public_key, fingerprint
, sign, verify
)
import crypto.{ed25519, hash}
import time.{timestamp, duration}
-- Core identity type with cryptographic material and metadata
type Identity =
{ public_key: ed25519.PublicKey
, secret_key: ed25519.SecretKey -- Encrypted at rest
, created_at: timestamp.Timestamp
, expires_at: ?timestamp.Timestamp -- Optional expiry
, rotated_from: ?fingerprint.Fingerprint -- Chain of custody
, revoked: bool
}
-- Create new sovereign identity
-- Fresh keypair, no history, self-sovereign
fn create() -> Identity
let (pk, sk) = ed25519.generate_keypair()
let now = timestamp.now()
{ public_key = pk
, secret_key = sk
, created_at = now
, expires_at = null
, rotated_from = null
, revoked = false
}
-- Rotate identity: New keys, linked provenance
-- Old identity becomes invalid after grace period
fn rotate(old: Identity) -> (Identity, Identity)
assert not old.revoked "Cannot rotate revoked identity"
let (new_pk, new_sk) = ed25519.generate_keypair()
let now = timestamp.now()
let old_fp = fingerprint.of_identity(old)
let new_id =
{ public_key = new_pk
, secret_key = new_sk
, created_at = now
, expires_at = null
, rotated_from = some(old_fp)
, revoked = false
}
-- Old identity gets short grace period then auto-expires
let grace_period = duration.hours(24)
let expired_old = { old with expires_at = some(now + grace_period) }
(new_id, expired_old)
-- Burn identity: Cryptographic deletion
-- After burn, no new messages can be signed; verification of past signatures still works
fn burn(id: Identity) -> Identity
{ id with
secret_key = ed25519.zero_secret(id.secret_key)
, revoked = true
, expires_at = some(timestamp.now())
}
-- Check if identity is currently valid
fn is_valid(id: Identity) -> bool
not id.revoked and not is_expired(id)
-- Check if identity has expired
fn is_expired(id: Identity) -> bool
match id.expires_at with
| null -> false
| some(t) -> timestamp.now() > t
-- Get public key for sharing/verification
fn public_key(id: Identity) -> ed25519.PublicKey
id.public_key
-- Get fingerprint (short, unique identifier)
fn fingerprint(id: Identity) -> fingerprint.Fingerprint
fingerprint.of_key(id.public_key)
-- Sign message with this identity
fn sign(id: Identity, message: bytes.Bytes) -> signature.Signature
assert is_valid(id) "Cannot sign with invalid identity"
ed25519.sign(id.secret_key, message)
-- Verify signature against this identity's public key
fn verify(id: Identity, message: bytes.Bytes, sig: signature.Signature) -> bool
ed25519.verify(id.public_key, message, sig)
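
A sign/verify round trip against the Identity API above (`payload` is a stand-in for arbitrary application bytes):
```janus
let signer = identity.create()
let payload = bytes.from_string("attest: forge burns bright")
-- Signing requires a currently valid identity (not revoked, not expired)
let sig = identity.sign(signer, payload)
assert identity.verify(signer, payload, sig) "Signature must verify against the signer's public key"
-- After a burn the secret key is zeroed: no new signatures, but old ones still verify
let burned = identity.burn(signer)
assert identity.verify(burned, payload, sig) "Historical signatures stay verifiable"
```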

lib.jan (new file)
@@ -0,0 +1,63 @@
-- libertaria/lib.jan
-- Main entry point for Libertaria SDK
-- Sovereign; Kinetic; Anti-Fragile.
module Libertaria exposing
( -- Identity
identity.Identity
, identity.create, identity.rotate, identity.burn
, identity.is_valid, identity.is_expired
, identity.public_key, identity.fingerprint
, identity.sign, identity.verify
-- Message
, message.Message
, message.create, message.create_reply
, message.sender, message.content, message.timestamp
, message.verify, message.is_authentic
, message.to_bytes, message.from_bytes
, message.hash, message.id
-- Context (NCP)
, context.Context
, context.create, context.fork, context.merge, context.close
, context.current_depth, context.max_depth
, context.add_message, context.get_messages
, context.subscribe, context.unsubscribe
, context.to_astdb_query
-- Memory
, memory.VectorStore
, memory.create_vector_store
, memory.store, memory.retrieve, memory.search
, memory.embed
, memory.sync, memory.export, memory.import
-- Agent bootstrap (used by the README Quick Start)
, SovereignAgent, create_sovereign_agent
)
import identity
import message
import context
import memory
-- SDK Version
const VERSION = "0.1.0-alpha"
const COMPATIBLE_JANUS_VERSION = ">= 1.0.0"
-- Quick-start: Create sovereign agent with full stack
fn create_sovereign_agent() -> SovereignAgent
let id = identity.create()
let root_context = context.create({})
let memory_store = memory.create_vector_store()
{ identity = id
, root_context = root_context
, memory = memory_store
, version = VERSION
}
type SovereignAgent =
{ identity: identity.Identity
, root_context: context.Context
, memory: memory.VectorStore
, version: string
}

memory.jan (new file)
@@ -0,0 +1,207 @@
-- libertaria/memory.jan
-- Semantic memory with VectorDB (LanceDB) integration
-- Agents remember context through embeddings, not just raw logs
module Memory exposing
( VectorStore
, create_vector_store
, store, retrieve, search
, embed -- Uses Janus neuro module
, sync, export, import
)
import neuro.{embedding}
import serde.{lance}
import time.{timestamp}
-- Vector store configuration
type VectorStore =
{ uri: string -- LanceDB connection URI
, dimension: int -- Embedding dimension (e.g., 768 for BERT, 1536 for OpenAI)
, metric: DistanceMetric
, table: lance.Table
, cache: lru.Cache(vector_id.VectorId, embedding.Embedding)
}
type DistanceMetric =
| Cosine -- Best for semantic similarity
| Euclidean -- Best for geometric distance
| DotProduct -- Fastest, good for normalized embeddings
-- Default configuration for agent memory
fn default_config() -> { dimension: int, metric: DistanceMetric }
{ dimension = 1536, metric = Cosine }
-- Create new vector store
-- If uri points to existing store, opens it; otherwise creates new
fn create_vector_store
( uri: string = "memory.lance"
, config: { dimension: int, metric: DistanceMetric } = default_config()
) -> VectorStore
let table = lance.connect(uri)
|> lance.create_table("embeddings")
|> lance.with_vector_column("embedding", config.dimension)
|> lance.with_metric(config.metric)
|> lance.with_columns
[ { name = "content_hash", type = "string" }
, { name = "content_type", type = "string" }
, { name = "created_at", type = "timestamp" }
, { name = "context_id", type = "string", nullable = true }
, { name = "metadata", type = "json", nullable = true }
]
|> lance.execute()
{ uri = uri
, dimension = config.dimension
, metric = config.metric
, table = table
, cache = lru.create(max_size = 1000)
}
-- Generate embedding from content
-- Uses Janus neuro module for local inference
fn embed(content: bytes.Bytes, model: ?string = null) -> embedding.Embedding
let content_str = bytes.to_string(content)
neuro.embed(content_str, model = model)
-- Store embedding in vector database
fn store
( vs: VectorStore
, id: vector_id.VectorId
, emb: embedding.Embedding
, content_hash: string -- Blake3 hash of original content
, content_type: string = "text"
, context_id: ?string = null
, metadata: ?json.Json = null
) -> VectorStore
let record =
{ id = id
, embedding = emb
, content_hash = content_hash
, content_type = content_type
, created_at = timestamp.now()
, context_id = context_id
, metadata = metadata
}
lance.insert(vs.table, record)
-- Update cache
let new_cache = lru.put(vs.cache, id, emb)
{ vs with cache = new_cache }
-- Retrieve exact embedding by ID
fn retrieve(vs: VectorStore, id: vector_id.VectorId) -> ?embedding.Embedding
-- Check cache first
match lru.get(vs.cache, id) with
| some(emb) -> some(emb)
| null ->
-- Query LanceDB
let results = lance.query(vs.table)
|> lance.where("id = ", id)
|> lance.limit(1)
|> lance.execute()
match list.head(results) with
| null -> null
| some(record) -> some(record.embedding)
-- A single search hit: the stored record plus its distance to the query
type SearchResult =
{ id: vector_id.VectorId
, score: float -- Distance; lower is better for Cosine/Euclidean
, content_hash: string
, content_type: string
, created_at: timestamp.Timestamp
, context_id: ?string
, metadata: ?json.Json
}
-- Semantic search: Find similar embeddings
fn search
( vs: VectorStore
, query_embedding: embedding.Embedding
, top_k: int = 10
, filter: ?string = null -- Optional SQL filter
) -> list.List(SearchResult)
let base_query = lance.query(vs.table)
|> lance.nearest_neighbors("embedding", query_embedding)
|> lance.limit(top_k)
let filtered_query = match filter with
| null -> base_query
| some(f) -> base_query |> lance.where(f)
lance.execute(filtered_query)
|> list.map(fn r ->
{ id = r.id
, score = r.distance -- Lower is better for cosine/euclidean
, content_hash = r.content_hash
, content_type = r.content_type
, created_at = r.created_at
, context_id = r.context_id
, metadata = r.metadata
}
)
-- Sync to disk (ensure durability)
fn sync(vs: VectorStore) -> result.Result((), error.SyncError)
lance.flush(vs.table)
-- Export to portable format
fn export(vs: VectorStore, path: string) -> result.Result((), error.ExportError)
lance.backup(vs.table, path)
-- Import from portable format
fn import(path: string) -> result.Result(VectorStore, error.ImportError)
let restored = lance.restore(path)
ok(create_vector_store(uri = restored.uri))
-- Advanced: Hybrid search combining semantic + keyword
type HybridResult =
{ semantic_results: list.List(SearchResult)
, keyword_results: list.List(SearchResult)
, combined: list.List(SearchResult)
, reranking_score: float
}
fn hybrid_search
( vs: VectorStore
, query_embedding: embedding.Embedding
, query_text: string
, top_k: int = 10
, semantic_weight: float = 0.7
) -> HybridResult
let semantic = search(vs, query_embedding, top_k * 2)
let keyword = lance.full_text_search(vs.table, query_text, top_k * 2)
-- Reciprocal Rank Fusion for combining
let combined = reciprocal_rank_fusion(semantic, keyword, semantic_weight)
{ semantic_results = list.take(semantic, top_k)
, keyword_results = list.take(keyword, top_k)
, combined = list.take(combined, top_k)
, reranking_score = 0.0 -- Placeholder for cross-encoder reranking
}
-- Internal: Reciprocal Rank Fusion algorithm
fn reciprocal_rank_fusion
( semantic: list.List(SearchResult)
, keyword: list.List(SearchResult)
, semantic_weight: float
) -> list.List(SearchResult)
let k = 60.0 -- RRF constant
let score_map = map.empty()
let result_map = map.empty() -- id -> full SearchResult, so we return results rather than bare ids
-- Score semantic results
list.foreach_with_index(semantic, fn r, idx ->
let rank = idx + 1
let score = semantic_weight * (1.0 / (k + rank))
score_map[r.id] = map.get_or_default(score_map, r.id, 0.0) + score
result_map[r.id] = r
)
-- Score keyword results
list.foreach_with_index(keyword, fn r, idx ->
let rank = idx + 1
let score = (1.0 - semantic_weight) * (1.0 / (k + rank))
score_map[r.id] = map.get_or_default(score_map, r.id, 0.0) + score
result_map[r.id] = r
)
-- Sort by combined score, then map ids back to their full search results
map.to_list(score_map)
|> list.sort_by(fn (id, score) -> score, descending = true)
|> list.map(fn (id, _) -> result_map[id])
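
A short sketch of the store/retrieve/search path above, plus the RRF arithmetic behind `hybrid_search` (`some_id` stands in for a `vector_id.VectorId`; the content hash is elided, as in the README):
```janus
-- Open (or create) a store with the defaults: 1536-dim embeddings, cosine distance
let vs = memory.create_vector_store(uri = "agent-memory.lance")
-- Embed and store one piece of content, keyed by its content hash for deduplication
let content = bytes.from_string("The Exit is being built")
let emb = memory.embed(content)
let vs2 = memory.store(vs, some_id, emb, "...")
-- Exact lookup checks the LRU cache first, then falls back to LanceDB
let cached = memory.retrieve(vs2, some_id)
-- Semantic search: top 5 nearest neighbours; with Cosine, a lower score means a closer match
let hits = memory.search(vs2, memory.embed(bytes.from_string("who builds the exit?")), top_k = 5)
-- RRF (k = 60): each list contributes weight / (k + rank), so a hit ranked 1st in both lists
-- with semantic_weight = 0.7 scores 0.7/61 + 0.3/61 ≈ 0.0164
```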

message.jan (new file)
@@ -0,0 +1,144 @@
-- libertaria/message.jan
-- Signed, tamper-proof messages between agents
-- Messages are immutable once created, cryptographically bound to sender
module Message exposing
( Message
, create, create_reply
, sender, content, timestamp
, verify, is_authentic
, to_bytes, from_bytes
, hash, id
)
import identity.{Identity}
import time.{timestamp}
import crypto.{hash, signature}
import serde.{msgpack}
-- A message is a signed envelope with content and metadata
type Message =
{ version: int -- Protocol version (for migration)
, id: message_id.MessageId -- Content-addressed ID
, parent: ?message_id.MessageId -- For threads/replies
, sender: fingerprint.Fingerprint
, content_type: ContentType
, content: bytes.Bytes -- Opaque payload
, created_at: timestamp.Timestamp
, signature: signature.Signature
}
type ContentType =
| Text
| Binary
| Json
| Janus_Ast
| Encrypted -- Content is encrypted for specific recipient(s)
-- Create a new signed message
-- Cryptographically binds content to sender identity
fn create
( from: Identity
, content_type: ContentType
, content: bytes.Bytes
, parent: ?message_id.MessageId = null
) -> Message
let now = timestamp.now()
let sender_fp = identity.fingerprint(from)
-- Content-addressed ID: hash of content + metadata (before signing)
let preliminary =
{ version = 1
, id = message_id.zero() -- Placeholder
, parent = parent
, sender = sender_fp
, content_type = content_type
, content = content
, created_at = now
, signature = signature.zero()
}
let msg_id = compute_id(preliminary)
let to_sign = serialize_for_signing({ preliminary with id = msg_id })
let sig = identity.sign(from, to_sign)
{ preliminary with
id = msg_id
, signature = sig
}
-- Create a reply to an existing message
-- Maintains thread structure
fn create_reply
( from: Identity
, to: Message
, content_type: ContentType
, content: bytes.Bytes
) -> Message
create(from, content_type, content, parent = some(to.id))
-- Get sender fingerprint
fn sender(msg: Message) -> fingerprint.Fingerprint
msg.sender
-- Get content
fn content(msg: Message) -> bytes.Bytes
msg.content
-- Get timestamp
fn timestamp(msg: Message) -> timestamp.Timestamp
msg.created_at
-- Verify message authenticity
-- Checks the signature against the claimed sender's public key (works even for burned identities, so history stays verifiable)
fn verify(msg: Message, sender_id: Identity) -> bool
let to_verify = serialize_for_signing(msg)
identity.verify(sender_id, to_verify, msg.signature)
-- Quick check without full identity lookup
-- Just checks the version, a non-zero signature, and that the content-addressed ID matches
fn is_authentic(msg: Message) -> bool
msg.version == 1 and
msg.signature != signature.zero() and
msg.id == compute_id(msg)
-- Serialize to bytes for wire transfer
fn to_bytes(msg: Message) -> bytes.Bytes
msgpack.serialize(msg)
-- Deserialize from bytes
fn from_bytes(data: bytes.Bytes) -> result.Result(Message, error.DeserializeError)
msgpack.deserialize(data)
-- Get content hash (for deduplication, indexing)
fn hash(msg: Message) -> hash.Hash
crypto.blake3(msg.content)
-- Get message ID
fn id(msg: Message) -> message_id.MessageId
msg.id
-- Internal: Compute content-addressed ID
fn compute_id(msg: Message) -> message_id.MessageId
let canonical =
{ version = msg.version
, parent = msg.parent
, sender = msg.sender
, content_type = msg.content_type
, content = msg.content
, created_at = msg.created_at
}
message_id.from_hash(crypto.blake3(msgpack.serialize(canonical)))
-- Internal: Serialize for signing (excludes signature itself)
fn serialize_for_signing(msg: Message) -> bytes.Bytes
msgpack.serialize
{ version = msg.version
, id = msg.id
, parent = msg.parent
, sender = msg.sender
, content_type = msg.content_type
, content = msg.content
, created_at = msg.created_at
}
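
A round-trip sketch of the message flow above (the wire hop is simulated with `to_bytes`/`from_bytes`; `?` follows the README's result-propagation convention):
```janus
let alice = identity.create()
-- create binds content + metadata into a content-addressed ID, then signs the envelope
let hello = message.create(from = alice, content_type = Text, content = bytes.from_string("Hello Sovereigns!"))
-- Serialize for transport, then restore on the receiving side
let wire = message.to_bytes(hello)
let received = message.from_bytes(wire)?
-- Cheap structural check first, full signature check once the sender's identity is known
assert message.is_authentic(received) "ID must match the content-addressed hash"
assert message.verify(received, alice) "Signature must verify against the sender"
-- Replies thread onto the original via its message ID
let reply = message.create_reply(from = alice, to = received, content_type = Text, content = bytes.from_string("ACK"))
```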