Installation
Linux / macOS
Download and install the pre-compiled binary:
# Download (choose your platform)
curl -LO https://github.com/charlcoding-stack/charlcode/releases/download/v0.3.0/charl-linux-x86_64.tar.gz
# or for macOS: charl-macos-x86_64.tar.gz or charl-macos-arm64.tar.gz
# Extract and install
tar -xzf charl-*.tar.gz
sudo mv charl /usr/local/bin/
Verify installation:
charl --version
For all platform options, visit the downloads page.
Windows
Download the Windows binary and add it to your PATH:
# Download from:
# https://github.com/charlcoding-stack/charlcode/releases/download/v0.3.0/charl-windows-x86_64.zip
# Extract charl.exe from the zip file
# Move to a directory in your PATH
# Or run directly from download location
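If you prefer doing this from the command line, a PowerShell sketch like the following performs the same steps (the destination folder is only an example; any directory already on your PATH works just as well):
# Extract the archive to a folder of your choice
Expand-Archive charl-windows-x86_64.zip -DestinationPath "$env:USERPROFILE\charl"
# Make charl.exe available in the current session
$env:Path += ";$env:USERPROFILE\charl"
# For permanent use, add that folder to PATH via System Properties > Environment Variables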
Verify installation:
charl --version
For detailed instructions, visit the downloads page.
Build from Source
For the latest features or to contribute, build from source:
# Clone repository
git clone https://github.com/charlcoding-stack/charlcode.git
cd charlcode
# Build release binary
cargo build --release
# Binary will be at target/release/charl
./target/release/charl --version
Requirements: Rust 1.70 or later; LLVM 15 (optional, needed only for the LLVM backend)
Your First Program
Hello World
Create a file named hello.ch:
// hello.ch - Your first Charl program
let message = "Hello, Charl!"
print(message)
// Functions
fn greet(name: string) -> string {
    return "Hello, " + name + "!"
}
let greeting = greet("World")
print(greeting)
Run the program:
charl run hello.ch
Expected output:
Hello, Charl!
Hello, World!
Simple Neural Network Training
Create a file named nn.ch that trains a small two-layer network on the XOR problem:
// nn.ch - XOR neural network training
let X = tensor([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0], [4, 2])
let Y = tensor([0.0, 1.0, 1.0, 0.0], [4, 1])
// Initialize parameters with gradient tracking: a 2-unit hidden layer and a 1-unit output layer
let W1 = tensor_with_grad([0.5, -0.3, 0.2, 0.4], [2, 2])
let b1 = tensor_with_grad([0.1, -0.1], [2])
let W2 = tensor_with_grad([0.3, -0.2], [2, 1])
let b2 = tensor_with_grad([0.0], [1])
// Create optimizer
let optimizer = adam_create(0.01)
// Training loop
let epoch = 0
while epoch < 1000 {
    // Forward pass: hidden layer, then output layer
    let z1 = nn_linear(X, W1, b1)
    let h1 = nn_sigmoid(z1)
    let z2 = nn_linear(h1, W2, b2)
    let pred = nn_sigmoid(z2)
    // Compute loss against the [4, 1] targets
    let loss = nn_mse_loss(pred, Y)
    // Backward pass
    tensor_backward(loss)
    // Update all parameters with Adam
    let params = [W1, b1, W2, b2]
    let updated = adam_step(optimizer, params)
    W1 = updated[0]
    b1 = updated[1]
    W2 = updated[2]
    b2 = updated[3]
    if epoch % 100 == 0 {
        print("Epoch " + str(epoch) + ": Loss = " + str(tensor_item(loss)))
    }
    epoch = epoch + 1
}
print("Training complete!")
Basic Concepts
Variables and Types
// Type inference
let x = 42 // int32
let y = 3.14 // float64
let name = "Charl" // string
let flag = true // bool
// Explicit types
let a: int32 = 100
let b: float64 = 2.5
let c: string = "explicit"
// Arrays
let numbers = [1, 2, 3, 4, 5]
let floats = [1.0, 2.0, 3.0]
// Tensors
let t = tensor([1.0, 2.0, 3.0], [3])
let matrix = tensor_randn([10, 10])
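A small sketch combining these basics, using only constructs that appear elsewhere on this page (array indexing, str() conversion, and string concatenation):
// Index an array and build a message from its first element
let scores = [90, 85, 77]
let first = scores[0]
print("first score = " + str(first))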
Functions
// Basic function
fn add(a: int32, b: int32) -> int32 {
    return a + b
}
// Type inference
fn multiply(x, y) {
    return x * y
}
// Higher-order functions
fn apply(f: fn(int32) -> int32, x: int32) -> int32 {
    return f(x)
}
fn square(n: int32) -> int32 {
    return n * n
}
let result = apply(square, 5) // 25
// Closures capture variables from the enclosing scope
let n = 10
let add_n = |x| { x + n }
print(add_n(5)) // 15
Control Flow
// If-else
let x = 10
if x > 5 {
    print("x is greater than 5")
} else {
    print("x is less than or equal to 5")
}
// While loops
let counter = 0
while counter < 5 {
    print(counter)
    counter = counter + 1   // reassign; `let` here would declare a new variable and loop forever
}
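A while loop plus array indexing is enough to walk through an array. A minimal sketch (the length is written out by hand, since no length function is shown in this guide):
// Print each element of an array
let values = [10, 20, 30]
let i = 0
while i < 3 {
    print(values[i])
    i = i + 1
}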
CLI Commands
| Command | Description |
|---|---|
| `charl run <file>` | Execute a Charl script |
| `charl build <file>` | Compile to standalone executable |
| `charl repl` | Start interactive REPL |
| `charl test <dir>` | Run tests in directory |
| `charl --version` | Show version information |
| `charl --help` | Display help information |
Command Examples
Run a script
charl run script.ch
charl run script.ch --verbose # Verbose output
Build executable
charl build app.ch
charl build app.ch -o myapp # Custom output name
charl build app.ch --release # Optimized build
Interactive REPL
charl repl
Example REPL session:
charl:001> let x = 42
=> Int32(42)
charl:002> let y = x * 2
=> Int32(84)
charl:003> fn square(n: int32) -> int32 { return n * n }
=> Function
charl:004> square(5)
=> Int32(25)
charl:005> exit
Goodbye!
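Run tests
charl test tests/            # Run the tests in tests/ (matches the project layout below)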
Project Setup
Recommended Project Structure
my-charl-project/
├── src/
│ ├── main.ch # Entry point
│ ├── models/
│ │ ├── network.ch # Neural network definitions
│ │ └── layers.ch # Custom layers
│ ├── data/
│ │ └── loader.ch # Data loading utilities
│ └── utils/
│ └── helpers.ch # Helper functions
├── tests/
│ ├── test_models.ch
│ └── test_utils.ch
├── examples/
│ └── simple_example.ch
└── README.md
Example Project
Create a simple machine learning project:
# Create project directory
mkdir my-ml-project
cd my-ml-project
# Create source file
cat > main.ch << 'EOF'
// main.ch - Simple tensor operations
// Create tensors
let x = tensor([1.0, 2.0, 3.0, 4.0], [4])
let y = tensor([2.0, 3.0, 4.0, 5.0], [4])
// Perform operations
let sum = tensor_add(x, y)
let product = tensor_mul(x, y)
print("Sum:", sum)
print("Product:", product)
// Matrix multiplication
let a = tensor_randn([3, 4])
let b = tensor_randn([4, 2])
let result = tensor_matmul(a, b)
print("Matrix result:", result)
EOF
# Run the project
charl run main.ch
Next Steps
Language Reference
Learn the complete Charl language syntax and semantics.
Read language reference →