Real-world code examples and usage patterns for building on GOKA.
Run a simple inference request on the GOKA network
// Run a simple inference request on the GOKA network.
import { Goka } from '@goka/sdk';

// Connect to the GOKA mainnet-beta network.
const goka = new Goka({ network: 'mainnet-beta' });

// Submit a single (non-streaming) inference request and wait for the result.
const result = await goka.inference.run({
  model: 'gpt-4-turbo',
  input: 'Explain quantum computing in simple terms.',
  maxTokens: 500,
});

console.log(result.output);
// "Quantum computing uses quantum bits (qubits)..."
// Stream inference results for real-time applications.
import { Goka } from '@goka/sdk';

const goka = new Goka({ network: 'mainnet-beta' });

// Stream tokens as they're generated; each `chunk` is written to stdout
// as soon as it arrives, so output appears in real time.
for await (const chunk of goka.inference.stream({
  model: 'gpt-4-turbo',
  input: 'Write a poem about the future of AI.',
})) {
  process.stdout.write(chunk);
}
// Output streams in real-time as it's generated.
// Train a model across multiple GOKA compute nodes.
import { Goka } from '@goka/sdk';

const goka = new Goka({ network: 'mainnet-beta' });

// Kick off a distributed training job; the dataset is referenced by its
// IPFS content hash.
const job = await goka.train.start({
  dataset: 'ipfs://Qm...',
  modelType: 'transformer',
  epochs: 100,
  batchSize: 32,
  distributed: true,
  nodes: 'auto', // Auto-select optimal nodes
});

// Monitor training progress as each epoch completes.
job.on('progress', (progress) => {
  console.log(`Epoch ${progress.epoch}: loss=${progress.loss}`);
});

// Block until training finishes, then report where the model was stored.
await job.complete();
console.log('Model saved to:', job.modelPath);
// Deploy your trained model to the GOKA marketplace.
import { Goka } from '@goka/sdk';

const goka = new Goka({ network: 'mainnet-beta' });

// Publish the model (referenced by IPFS hash) with per-call pricing;
// `public: true` lists it on the open marketplace.
const deployment = await goka.models.deploy({
  name: 'my-custom-model',
  path: 'ipfs://Qm...',
  description: 'Fine-tuned for code generation',
  pricing: {
    perInference: '0.0001 SOL',
    perToken: '0.00001 SOL',
  },
  public: true,
});

console.log('Deployed at:', deployment.endpoint);
// "https://api.goka.ai/v1/models/my-custom-model"
# Use the GOKA CLI for quick operations.

# Install CLI globally
npm install -g @goka/cli

# Initialize project
goka init my-project

# Connect wallet
goka auth login

# Run inference
goka run --model gpt-4-turbo --input "Hello GOKA"

# Start training
goka train --dataset ./data --epochs 50

# Deploy model
goka deploy ./model --name my-model --public

# Check status
goka status
# docker-compose.yml — run a GOKA node in a container.
version: '3.8'
services:
  goka-node:
    image: goka/node:latest
    environment:
      - NETWORK=mainnet-beta
      - WALLET_PATH=/wallet/wallet.json
      - GPU_ENABLED=true
    volumes:
      # Wallet keypair mounted read-only location expected by WALLET_PATH.
      - ./wallet:/wallet
      - ./data:/data
    ports:
      - "8080:8080"
    deploy:
      resources:
        reservations:
          devices:
            # Reserve GPU access for the container (requires a GPU-capable
            # runtime, e.g. the NVIDIA container toolkit).
            - capabilities: [gpu]
# Run with: docker-compose up -d