Skip to content
Snippets Groups Projects
Commit 251cfb92 authored by Ryan Ziegler's avatar Ryan Ziegler
Browse files

init llama

parent 410e0ec7
No related branches found
No related tags found
No related merge requests found
Pipeline #201694 failed
...@@ -1405,6 +1405,18 @@ version = "0.4.15" ...@@ -1405,6 +1405,18 @@ version = "0.4.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab"
[[package]]
name = "llama_jn"
version = "0.1.0"
dependencies = [
"async-std",
"clap",
"hercules_rt",
"image",
"juno_build",
"with_builtin_macros",
]
[[package]] [[package]]
name = "lock_api" name = "lock_api"
version = "0.4.12" version = "0.4.12"
......
...@@ -32,4 +32,5 @@ members = [ ...@@ -32,4 +32,5 @@ members = [
"juno_samples/simple3", "juno_samples/simple3",
"juno_scheduler", "juno_scheduler",
"juno_utils", "juno_utils",
"llama"
] ]
#![feature(exact_size_is_empty)] #![feature(exact_size_is_empty)]
#![feature(let_chains)] #![feature(let_chains)]
#![feature(entry_insert)]
use std::fs::File; use std::fs::File;
use std::io::Read; use std::io::Read;
......
# Cargo manifest for the llama Juno sample; the crate is named "llama_jn".
[package]
name = "llama_jn"
version = "0.1.0"
authors = ["Ryan Ziegler <ryanjz2@illinois.edu>"]
edition = "2021"
# Single binary target; source lives in src/main.rs.
[[bin]]
name = "llama_jn"
path = "src/main.rs"
# Enabling "cuda" forwards the feature to both the build-time compiler
# (juno_build) and the runtime (hercules_rt), selecting the GPU schedule.
[features]
cuda = ["juno_build/cuda", "hercules_rt/cuda"]
[build-dependencies]
juno_build = { path = "../juno_build" }
[dependencies]
juno_build = { path = "../juno_build" }
hercules_rt = { path = "../hercules_rt" }
# NOTE(review): "*" version requirements are unpinned; consider pinning to
# major versions for reproducible builds. Also, clap/image/with_builtin_macros
# appear unused by the current src/main.rs — presumably copied from another
# sample's manifest; confirm before keeping.
async-std = "*"
clap = { version = "*", features = ["derive"] }
image = "*"
with_builtin_macros = "0.1.0"
use juno_build::JunoCompiler;
/// Build script: compile `src/llama.jn` with the Juno compiler, using the
/// GPU schedule when the `cuda` feature is enabled and the CPU schedule
/// otherwise.
fn main() {
    // Select the schedule file from the active feature set. The builder
    // pipeline is identical for both configurations, so the previously
    // duplicated `#[cfg]`-gated chains are collapsed into one.
    #[cfg(feature = "cuda")]
    let schedule = "gpu.sch";
    #[cfg(not(feature = "cuda"))]
    let schedule = "cpu.sch";

    // A build-script failure must abort the build, hence the unwraps.
    JunoCompiler::new()
        .file_in_src("llama.jn")
        .unwrap()
        .schedule_in_src(schedule)
        .unwrap()
        .build()
        .unwrap();
}
// CPU schedule for the llama sample (Hercules/Juno scheduler DSL).
// Pass semantics are inferred from standard compiler terminology where the
// names are conventional; project-specific passes are marked as such.
gvn(*);                // global value numbering
phi-elim(*);           // eliminate redundant phi nodes
dce(*);                // dead code elimination
auto-outline(*);       // project-specific: outline regions into functions — confirm
ip-sroa(*);            // interprocedural scalar replacement of aggregates
sroa(*);               // scalar replacement of aggregates
fork-split(*);         // project-specific fork handling — confirm semantics
unforkify(*);          // project-specific: serialize forks for CPU execution — confirm
dce(*);
float-collections(*);  // project-specific collection pass — confirm semantics
gvn(*);
phi-elim(*);
dce(*);
infer-schedules(*);    // derive per-node schedules
gcm(*);                // global code motion (final placement)
// GPU schedule for the llama sample (Hercules/Juno scheduler DSL).
gvn(*);      // global value numbering
phi-elim(*); // eliminate redundant phi nodes
dce(*);      // dead code elimination
// NOTE(review): the function names below (denoise, scale, demosaic, gamut,
// tone_map, descale, transform) look copied from the cava camera-pipeline
// sample. llama.jn in this commit defines only `llama`, so these targets
// presumably do not exist here — confirm whether this schedule can apply.
inline(denoise);
gpu(scale, demosaic, denoise, transform, gamut, tone_map, descale);
ip-sroa(*);  // interprocedural scalar replacement of aggregates
sroa(*);     // scalar replacement of aggregates
dce(*);
gvn(*);
phi-elim(*);
dce(*);
// forkify(*);
infer-schedules(*); // derive per-node schedules
gcm(*);             // global code motion
// Iterate collection/cleanup/placement to a fixed point.
fixpoint {
  float-collections(*);
  dce(*);
  gcm(*);
}
// Placeholder Juno entry point for the llama sample: returns the constant 1
// (the Rust driver in src/main.rs asserts on this value).
#[entry]
fn llama() -> i32 {
    return 1;
}
#![feature(concat_idents)]
#![feature(entry_insert)]
use hercules_rt::runner;
juno_build::juno!("llama");
/// Driver: run the compiled `llama` Juno kernel once and sanity-check its
/// result, then print a greeting.
fn main() {
    async_std::task::block_on(async {
        // `runner!` instantiates a handle for the generated `llama` kernel.
        let mut llama_runner = runner!(llama);
        // The placeholder kernel currently yields the constant 1.
        assert_eq!(llama_runner.run().await, 1);
    });
    println!("Hello world!");
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment