(index<- ) ./libextra/workcache.rs
1 // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 #[allow(missing_doc)];
12
13 use digest::Digest;
14 use json;
15 use json::ToJson;
16 use sha1::Sha1;
17 use serialize::{Encoder, Encodable, Decoder, Decodable};
18 use arc::{Arc,RWArc};
19 use treemap::TreeMap;
20 use std::cell::Cell;
21 use std::comm::{PortOne, oneshot};
22 use std::{io, os, task};
23
24 /**
25 *
26 * This is a loose clone of the [fbuild build system](https://github.com/felix-lang/fbuild),
27 * made a touch more generic (not wired to special cases on files) and much
28 * less metaprogram-y due to rust's comparative weakness there, relative to
29 * python.
30 *
31 * It's based around _imperative builds_ that happen to have some function
32 * calls cached. That is, it's _just_ a mechanism for describing cached
33 * functions. This makes it much simpler and smaller than a "build system"
34 * that produces an IR and evaluates it. The evaluation order is normal
35 * function calls. Some of them just return really quickly.
36 *
37 * A cached function consumes and produces a set of _works_. A work has a
38 * name, a kind (that determines how the value is to be checked for
39 * freshness) and a value. Works must also be (de)serializable. Some
40 * examples of works:
41 *
42 * kind name value
43 * ------------------------
44 * cfg os linux
45 * file foo.c <sha1>
46 * url foo.com <etag>
47 *
48 * Works are conceptually single units, but we store them most of the time
49 * in maps of the form (type,name) => value. These are WorkMaps.
50 *
51 * A cached function divides the works it's interested in into inputs and
52 * outputs, and subdivides those into declared (input) works and
53 * discovered (input and output) works.
54 *
55 * A _declared_ input is one that is given to the workcache before
56 * any work actually happens, in the "prep" phase. Even when a function's
57 * work-doing part (the "exec" phase) never gets called, it has declared
58 * inputs, which can be checked for freshness (and potentially
59 * used to determine that the function can be skipped).
60 *
61 * The workcache checks _all_ works for freshness, but uses the set of
62 * discovered outputs from the _previous_ exec (which it will re-discover
63 * and re-record each time the exec phase runs).
64 *
65 * Therefore the discovered works cached in the db might be a
66 * mis-approximation of the current discoverable works, but this is ok for
67 * the following reason: we assume that if an artifact A changed from
68 * depending on B,C,D to depending on B,C,D,E, then A itself changed (as
69 * part of the change-in-dependencies), so we will be ok.
70 *
71 * Each function has a single discriminated output work called its _result_.
72 * This is only different from other works in that it is returned, by value,
73 * from a call to the cacheable function; the other output works are used in
74 * passing to invalidate dependencies elsewhere in the cache, but do not
75 * otherwise escape from a function invocation. Most functions only have one
76 * output work anyways.
77 *
78 * A database (the central store of a workcache) stores a mapping:
79 *
80 * (fn_name,{declared_input}) => ({discovered_input},
81 * {discovered_output},result)
82 *
83 * (Note: fbuild, which workcache is based on, has the concept of a declared
84 * output as separate from a discovered output. This distinction exists only
85 * as an artifact of how fbuild works: via annotations on function types
86 * and metaprogramming, with explicit dependency declaration as a fallback.
87 * Workcache is more explicit about dependencies, and as such treats all
88 * outputs the same, as discovered-during-the-last-run.)
89 *
90 */
91
// The identity of one unit of cached work. `kind` names the freshness
// category ("file", "url", "cfg", ...) used to select a freshness function,
// and `name` identifies the particular item within that kind.
92 #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
93 struct WorkKey {
94 kind: ~str,
95 name: ~str
96 }
97
98 impl WorkKey {
// Build a WorkKey by taking owned copies of the borrowed kind/name strings.
99 pub fn new(kind: &str, name: &str) -> WorkKey {
100 WorkKey {
101 kind: kind.to_owned(),
102 name: name.to_owned(),
103 }
104 }
105 }
106
107 // FIXME #8883: The key should be a WorkKey and not a ~str.
108 // This is working around some JSON weirdness.
// Nested map: work name => (kind => freshness value). Each (name, kind, value)
// triple is one WorkKey plus its value, stored in flattened form.
109 #[deriving(Clone, Eq, Encodable, Decodable)]
110 struct WorkMap(TreeMap<~str, KindMap>);
111
// Inner map of a WorkMap: kind string => freshness value (e.g. a hash).
112 #[deriving(Clone, Eq, Encodable, Decodable)]
113 struct KindMap(TreeMap<~str, ~str>);
114
115 impl WorkMap {
// Create an empty WorkMap.
116 fn new() -> WorkMap { WorkMap(TreeMap::new()) }
117 
// Insert (k, val): merge into the existing KindMap for k's name when one is
// present, otherwise create a fresh single-entry KindMap. An existing entry
// for the same (name, kind) pair is overwritten.
118 fn insert_work_key(&mut self, k: WorkKey, val: ~str) {
119 let WorkKey { kind, name } = k;
// Early-return path: name already present, just add/update its kind entry.
120 match self.find_mut(&name) {
121 Some(&KindMap(ref mut m)) => { m.insert(kind, val); return; }
122 None => ()
123 }
124 let mut new_map = TreeMap::new();
125 new_map.insert(kind, val);
126 self.insert(name, KindMap(new_map));
127 }
128 }
129
// Persistent memo table for cached function results, keyed by the JSON
// encoding of (fn_name, declared inputs). `db_dirty` marks in-memory
// changes not yet written back to `db_filename`.
130 struct Database {
131 db_filename: Path,
132 db_cache: TreeMap<~str, ~str>,
133 db_dirty: bool
134 }
135
136 impl Database {
137 
// Open (or create) a database backed by file `p`; loads existing contents
// from disk when the file is already present.
138 pub fn new(p: Path) -> Database {
139 let mut rslt = Database {
140 db_filename: p,
141 db_cache: TreeMap::new(),
142 db_dirty: false
143 };
144 if os::path_exists(&rslt.db_filename) {
145 rslt.load();
146 }
147 rslt
148 }
149 
// Look up a previous run of `fn_name` with exactly these declared inputs.
// Returns the decoded (discovered inputs, discovered outputs, result)
// triple, or None on a cache miss.
150 pub fn prepare(&self,
151 fn_name: &str,
152 declared_inputs: &WorkMap)
153 -> Option<(WorkMap, WorkMap, ~str)> {
154 let k = json_encode(&(fn_name, declared_inputs));
155 match self.db_cache.find(&k) {
156 None => None,
157 Some(v) => Some(json_decode(*v))
158 }
159 }
160 
// Record the outcome of a run, overwriting any prior entry for the same
// (fn_name, declared_inputs) key, and mark the database dirty so it is
// persisted when the Database is dropped.
161 pub fn cache(&mut self,
162 fn_name: &str,
163 declared_inputs: &WorkMap,
164 discovered_inputs: &WorkMap,
165 discovered_outputs: &WorkMap,
166 result: &str) {
167 let k = json_encode(&(fn_name, declared_inputs));
168 let v = json_encode(&(discovered_inputs,
169 discovered_outputs,
170 result));
171 self.db_cache.insert(k,v);
172 self.db_dirty = true
173 }
174 
// Write the whole cache to disk as pretty-printed JSON. NOTE(review):
// a failure to open the file fails the task via unwrap(); and because of
// the FIXME below, db_dirty stays true after a save.
175 // FIXME #4330: This should have &mut self and should set self.db_dirty to false.
176 fn save(&self) {
177 let f = io::file_writer(&self.db_filename, [io::Create, io::Truncate]).unwrap();
178 self.db_cache.to_json().to_pretty_writer(f);
179 }
180 
// Replace the (clean) in-memory cache with the JSON contents of the db
// file. Fails the task if the file cannot be read or parsed; asserts the
// cache is not dirty so pending changes can never be silently discarded.
181 fn load(&mut self) {
182 assert!(!self.db_dirty);
183 assert!(os::path_exists(&self.db_filename));
184 let f = io::file_reader(&self.db_filename);
185 match f {
186 Err(e) => fail!("Couldn't load workcache database %s: %s",
187 self.db_filename.to_str(), e.to_str()),
188 Ok(r) =>
189 match json::from_reader(r) {
190 Err(e) => fail!("Couldn't parse workcache database (from file %s): %s",
191 self.db_filename.to_str(), e.to_str()),
192 Ok(r) => {
193 let mut decoder = json::Decoder(r);
194 self.db_cache = Decodable::decode(&mut decoder);
195 }
196 }
197 }
198 }
199 }
200
// Persist unsaved changes when the Database is destroyed. An I/O error
// inside save() triggers its unwrap() failure; see the FIXME on save().
201 #[unsafe_destructor]
202 impl Drop for Database {
203 fn drop(&mut self) {
204 if self.db_dirty {
205 self.save();
206 }
207 }
208 }
209
// Placeholder logging sink for workcache progress messages.
210 struct Logger {
211 // FIXME #4432: Fill in
212 a: ()
213 }
214
215 impl Logger {
216 
// Construct the (stateless) logger.
217 pub fn new() -> Logger {
218 Logger { a: () }
219 }
220 
// Print an informational message to stdout, prefixed with "workcache: ".
221 pub fn info(&self, i: &str) {
222 io::println(~"workcache: " + i);
223 }
224 }
225
// Maps a work kind (e.g. "file") to a function deciding whether a given
// (name, stored value) pair is still up to date; see Context::freshness.
226 type FreshnessMap = TreeMap<~str,extern fn(&str,&str)->bool>;
227
// Shared, cloneable handle bundling everything a cached function needs:
// the persistent database, a logger, arbitrary JSON configuration, and the
// per-kind freshness functions. Clones share state through Arc/RWArc.
228 #[deriving(Clone)]
229 struct Context {
230 db: RWArc<Database>,
231 logger: RWArc<Logger>,
232 cfg: Arc<json::Object>,
233 /// Map from kinds (source, exe, url, etc.) to a freshness function.
234 /// The freshness function takes a name (e.g. file path) and value
235 /// (e.g. hash of file contents) and determines whether it's up-to-date.
236 /// For example, in the file case, this would read the file off disk,
237 /// hash it, and return the result of comparing the given hash and the
238 /// read hash for equality.
239 freshness: Arc<FreshnessMap>
240 }
241
// The "prep" phase of one cacheable call: accumulates declared inputs for
// `fn_name` before exec() decides between a cache hit and a re-run.
242 struct Prep<'self> {
243 ctxt: &'self Context,
244 fn_name: &'self str,
245 declared_inputs: WorkMap,
246 }
247
// The "exec" phase state: works discovered while the function body runs.
248 struct Exec {
249 discovered_inputs: WorkMap,
250 discovered_outputs: WorkMap
251 }
252
// Result of exec_work(): either a value decoded from the cache, or a handle
// to a spawned task that will deliver (Exec, value) on a one-shot port.
253 enum Work<'self, T> {
254 WorkValue(T),
255 WorkFromTask(&'self Prep<'self>, PortOne<(Exec, T)>),
256 }
257
// Encode any Encodable value to a JSON string (used for db keys and values).
258 fn json_encode<T:Encodable<json::Encoder>>(t: &T) -> ~str {
259 do io::with_str_writer |wr| {
260 let mut encoder = json::Encoder(wr);
261 t.encode(&mut encoder);
262 }
263 }
264
// Decode a JSON string back into a Decodable value. Fails the task (via
// unwrap on from_reader) if the string is not valid JSON.
265 // FIXME(#5121)
266 fn json_decode<T:Decodable<json::Decoder>>(s: &str) -> T {
267 debug!("json decoding: %s", s);
268 do io::with_str_reader(s) |rdr| {
269 let j = json::from_reader(rdr).unwrap();
270 let mut decoder = json::Decoder(j);
271 Decodable::decode(&mut decoder)
272 }
273 }
274
// SHA-1 fingerprint of a value's JSON encoding; usable as a freshness value.
275 fn digest<T:Encodable<json::Encoder>>(t: &T) -> ~str {
276 let mut sha = ~Sha1::new();
277 (*sha).input_str(json_encode(t));
278 (*sha).result_str()
279 }
280
// SHA-1 fingerprint of a file's contents read as a string. Fails the task
// (unwrap) if the file cannot be read.
281 fn digest_file(path: &Path) -> ~str {
282 let mut sha = ~Sha1::new();
283 let s = io::read_whole_file_str(path);
284 (*sha).input_str(s.unwrap());
285 (*sha).result_str()
286 }
287
288 impl Context {
289 
// Create a context with an empty freshness map. Note that any kind later
// checked by is_fresh must have a registered function, or the check fails.
290 pub fn new(db: RWArc<Database>,
291 lg: RWArc<Logger>,
292 cfg: Arc<json::Object>) -> Context {
293 Context::new_with_freshness(db, lg, cfg, Arc::new(TreeMap::new()))
294 }
295 
// Full constructor: supply the per-kind freshness functions explicitly.
296 pub fn new_with_freshness(db: RWArc<Database>,
297 lg: RWArc<Logger>,
298 cfg: Arc<json::Object>,
299 freshness: Arc<FreshnessMap>) -> Context {
300 Context {
301 db: db,
302 logger: lg,
303 cfg: cfg,
304 freshness: freshness
305 }
306 }
307 
// Begin the prep phase for a cacheable function named `fn_name`.
308 pub fn prep<'a>(&'a self, fn_name: &'a str) -> Prep<'a> {
309 Prep::new(self, fn_name)
310 }
311 
// Convenience wrapper: run `blk` with a fresh Prep for `fn_name`.
312 pub fn with_prep<'a, T>(&'a self, fn_name: &'a str, blk: &fn(p: &mut Prep) -> T) -> T {
313 let mut p = self.prep(fn_name);
314 blk(&mut p)
315 }
316 
317 }
318
319 impl Exec {
// Record a dependency consumed during the exec phase, keyed by
// (kind, name) with its current freshness value.
320 pub fn discover_input(&mut self,
321 dependency_kind: &str,
322 dependency_name: &str,
323 dependency_val: &str) {
324 debug!("Discovering input %s %s %s", dependency_kind, dependency_name, dependency_val);
325 self.discovered_inputs.insert_work_key(WorkKey::new(dependency_kind, dependency_name),
326 dependency_val.to_owned());
327 }
// Record an artifact produced during the exec phase; same keying scheme.
328 pub fn discover_output(&mut self,
329 dependency_kind: &str,
330 dependency_name: &str,
331 dependency_val: &str) {
332 debug!("Discovering output %s %s %s", dependency_kind, dependency_name, dependency_val);
333 self.discovered_outputs.insert_work_key(WorkKey::new(dependency_kind, dependency_name),
334 dependency_val.to_owned());
335 }
336 
337 // returns pairs of (kind, name)
// (The WorkMap iterates name => kindmap, so `k` is the name and `k1` the
// kind; the stored values are dropped.)
338 pub fn lookup_discovered_inputs(&self) -> ~[(~str, ~str)] {
339 let mut rs = ~[];
340 for (k, v) in self.discovered_inputs.iter() {
341 for (k1, _) in v.iter() {
342 rs.push((k1.clone(), k.clone()));
343 }
344 }
345 rs
346 }
347 }
348
349 impl<'self> Prep<'self> {
// Start a prep phase for `fn_name` with no declared inputs yet.
350 fn new(ctxt: &'self Context, fn_name: &'self str) -> Prep<'self> {
351 Prep {
352 ctxt: ctxt,
353 fn_name: fn_name,
354 declared_inputs: WorkMap::new()
355 }
356 }
357 
// Flatten the declared-input WorkMap to its inner keys.
// NOTE(review): since WorkMap is name => (kind => value), this collects
// the *kind* strings, not the names — confirm that is the intended
// contract, as the function's name suggests it should return the inputs.
358 pub fn lookup_declared_inputs(&self) -> ~[~str] {
359 let mut rs = ~[];
360 for (_, v) in self.declared_inputs.iter() {
361 for (inp, _) in v.iter() {
362 rs.push(inp.clone());
363 }
364 }
365 rs
366 }
367 }
368
369 impl<'self> Prep<'self> {
// Declare an input before exec: records (kind, name) => val so freshness
// can be checked against the previous run.
370 pub fn declare_input(&mut self, kind: &str, name: &str, val: &str) {
371 debug!("Declaring input %s %s %s", kind, name, val);
372 self.declared_inputs.insert_work_key(WorkKey::new(kind, name),
373 val.to_owned());
374 }
375 
// Decide whether one work is still up to date by dispatching to the
// freshness function registered for its kind; fails the task when no
// function is registered. Logs the verdict. `cat` is only a label
// ("declared input", etc.) used in the log output.
376 fn is_fresh(&self, cat: &str, kind: &str,
377 name: &str, val: &str) -> bool {
378 let k = kind.to_owned();
379 let f = self.ctxt.freshness.get().find(&k);
380 debug!("freshness for: %s/%s/%s/%s", cat, kind, name, val)
381 let fresh = match f {
382 None => fail!("missing freshness-function for '%s'", kind),
383 Some(f) => (*f)(name, val)
384 };
385 do self.ctxt.logger.write |lg| {
386 if fresh {
387 lg.info(fmt!("%s %s:%s is fresh",
388 cat, kind, name));
389 } else {
390 lg.info(fmt!("%s %s:%s is not fresh",
391 cat, kind, name))
392 }
393 };
394 fresh
395 }
396 
// True iff every (name, kind, value) entry in `map` is fresh.
// Short-circuits on the first stale entry.
397 fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
398 for (k_name, kindmap) in map.iter() {
399 for (k_kind, v) in kindmap.iter() {
400 if ! self.is_fresh(cat, *k_kind, *k_name, *v) {
401 return false;
402 }
403 }
404 }
405 return true;
406 }
407 
// Run (or skip) the cached function body `blk`, blocking until its
// result is available.
408 pub fn exec<T:Send +
409 Encodable<json::Encoder> +
410 Decodable<json::Decoder>>(
411 &'self self, blk: ~fn(&mut Exec) -> T) -> T {
412 self.exec_work(blk).unwrap()
413 }
414 
// Core of the cache: if a db entry exists for (fn_name, declared inputs)
// and all declared/discovered works are still fresh, return the decoded
// cached result; otherwise spawn a task running `blk` and hand back a
// Work that will collect (and later cache) its result.
415 fn exec_work<T:Send +
416 Encodable<json::Encoder> +
417 Decodable<json::Decoder>>( // FIXME(#5121)
418 &'self self, blk: ~fn(&mut Exec) -> T) -> Work<'self, T> {
// `bo` wraps the once-closure in an Option so it can be moved out inside
// the match arm below.
419 let mut bo = Some(blk);
420 
421 debug!("exec_work: looking up %s and %?", self.fn_name,
422 self.declared_inputs);
423 let cached = do self.ctxt.db.read |db| {
424 db.prepare(self.fn_name, &self.declared_inputs)
425 };
426 
427 match cached {
428 Some((ref disc_in, ref disc_out, ref res))
429 if self.all_fresh("declared input",&self.declared_inputs) &&
430 self.all_fresh("discovered input", disc_in) &&
431 self.all_fresh("discovered output", disc_out) => {
432 debug!("Cache hit!");
433 debug!("Trying to decode: %? / %? / %?",
434 disc_in, disc_out, *res);
435 Work::from_value(json_decode(*res))
436 }
437 
438 _ => {
439 debug!("Cache miss!");
// The Cell lets the one-shot channel be captured by, then moved out of,
// the spawned task's closure.
440 let (port, chan) = oneshot();
441 let blk = bo.take_unwrap();
442 let chan = Cell::new(chan);
443 
444 // What happens if the task fails?
445 do task::spawn {
446 let mut exe = Exec {
447 discovered_inputs: WorkMap::new(),
448 discovered_outputs: WorkMap::new(),
449 };
450 let chan = chan.take();
451 let v = blk(&mut exe);
452 chan.send((exe, v));
453 }
454 Work::from_task(self, port)
455 }
456 }
457 }
458 }
459
// Constructors and the blocking accessor for Work values. T must be
// encodable/decodable so results can round-trip through the JSON db.
460 impl<'self, T:Send +
461 Encodable<json::Encoder> +
462 Decodable<json::Decoder>>
463 Work<'self, T> { // FIXME(#5121)
464 
// Wrap an already-available (cache-hit) value.
465 pub fn from_value(elt: T) -> Work<'self, T> {
466 WorkValue(elt)
467 }
// Wrap a pending result being computed by a spawned task.
468 pub fn from_task(prep: &'self Prep<'self>, port: PortOne<(Exec, T)>)
469 -> Work<'self, T> {
470 WorkFromTask(prep, port)
471 }
472 
// Obtain the result. For a task-backed Work this blocks on the port,
// then records the run (discovered works plus the JSON-encoded result)
// in the database before returning the value.
473 pub fn unwrap(self) -> T {
474 match self {
475 WorkValue(v) => v,
476 WorkFromTask(prep, port) => {
477 let (exe, v) = port.recv();
478 let s = json_encode(&v);
479 do prep.ctxt.db.write |db| {
480 db.cache(prep.fn_name,
481 &prep.declared_inputs,
482 &exe.discovered_inputs,
483 &exe.discovered_outputs,
484 s);
485 }
486 v
487 }
488 }
489 }
490 }
491
492
// Smoke test: caches a gcc compile of a generated foo.c keyed on the
// file's SHA-1 digest. NOTE(review): requires `gcc` on PATH and writes
// files next to the test executable, so it is environment-dependent.
493 #[test]
494 fn test() {
495 use std::io::WriterUtil;
496 use std::{os, run};
497 
498 // Create a path to a new file 'filename' in the directory in which
499 // this test is running.
500 fn make_path(filename: ~str) -> Path {
501 let pth = os::self_exe_path().expect("workcache::test failed").pop().push(filename);
502 if os::path_exists(&pth) {
503 os::remove_file(&pth);
504 }
505 return pth;
506 }
507 
508 let pth = make_path(~"foo.c");
509 {
510 let r = io::file_writer(&pth, [io::Create]);
511 r.unwrap().write_str("int main() { return 0; }");
512 }
513 
// make_path removed any old db.json, so the first run is a cache miss.
514 let db_path = make_path(~"db.json");
515 
// Context with an empty freshness map; no freshness function is consulted
// on this run because the cache starts out empty.
516 let cx = Context::new(RWArc::new(Database::new(db_path)),
517 RWArc::new(Logger::new()),
518 Arc::new(TreeMap::new()));
519 
520 let s = do cx.with_prep("test1") |prep| {
521 
522 let subcx = cx.clone();
523 let pth = pth.clone();
524 
525 prep.declare_input("file", pth.to_str(), digest_file(&pth));
526 do prep.exec |_exe| {
527 let out = make_path(~"foo.o");
528 run::process_status("gcc", [pth.to_str(), ~"-o", out.to_str()]);
529 
530 let _proof_of_concept = subcx.prep("subfn");
531 // Could run sub-rules inside here.
532 
533 out.to_str()
534 }
535 };
536 
537 io::println(s);
538 }
libextra/workcache.rs:247:1-247:1 -struct- definition:
struct Exec {
references:-411: &'self self, blk: ~fn(&mut Exec) -> T) -> T {
418: &'self self, blk: ~fn(&mut Exec) -> T) -> Work<'self, T> {
468: pub fn from_task(prep: &'self Prep<'self>, port: PortOne<(Exec, T)>)
319: impl Exec {
255: WorkFromTask(&'self Prep<'self>, PortOne<(Exec, T)>),
446: let mut exe = Exec {
libextra/workcache.rs:252:1-252:1 -enum- definition:
enum Work<'self, T> {
references:-418: &'self self, blk: ~fn(&mut Exec) -> T) -> Work<'self, T> {
463: Work<'self, T> { // FIXME(#5121)
465: pub fn from_value(elt: T) -> Work<'self, T> {
469: -> Work<'self, T> {
libextra/workcache.rs:92:64-92:64 -struct- definition:
#[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
struct WorkKey {
references:-92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
98: impl WorkKey {
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
99: pub fn new(kind: &str, name: &str) -> WorkKey {
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
100: WorkKey {
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
119: let WorkKey { kind, name } = k;
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
118: fn insert_work_key(&mut self, k: WorkKey, val: ~str) {
92: #[deriving(Clone, Eq, Encodable, Decodable, TotalOrd, TotalEq)]
<quote expansion>:
2: WorkKey{kind:
libextra/workcache.rs:225:1-225:1 -ty- definition:
type FreshnessMap = TreeMap<~str,extern fn(&str,&str)->bool>;
references:-239: freshness: Arc<FreshnessMap>
299: freshness: Arc<FreshnessMap>) -> Context {
libextra/workcache.rs:109:45-109:45 -struct- definition:
#[deriving(Clone, Eq, Encodable, Decodable)]
struct WorkMap(TreeMap<~str, KindMap>);
references:-109: #[deriving(Clone, Eq, Encodable, Decodable)]
163: declared_inputs: &WorkMap,
249: discovered_inputs: WorkMap,
245: declared_inputs: WorkMap,
109: #[deriving(Clone, Eq, Encodable, Decodable)]
109: #[deriving(Clone, Eq, Encodable, Decodable)]
152: declared_inputs: &WorkMap)
109: #[deriving(Clone, Eq, Encodable, Decodable)]
250: discovered_outputs: WorkMap
109: #[deriving(Clone, Eq, Encodable, Decodable)]
164: discovered_inputs: &WorkMap,
109: #[deriving(Clone, Eq, Encodable, Decodable)]
109: #[deriving(Clone, Eq, Encodable, Decodable)]
116: fn new() -> WorkMap { WorkMap(TreeMap::new()) }
109: #[deriving(Clone, Eq, Encodable, Decodable)]
115: impl WorkMap {
165: discovered_outputs: &WorkMap,
397: fn all_fresh(&self, cat: &str, map: &WorkMap) -> bool {
153: -> Option<(WorkMap, WorkMap, ~str)> {
153: -> Option<(WorkMap, WorkMap, ~str)> {
libextra/workcache.rs:265:16-265:16 -fn- definition:
// FIXME(#5121)
fn json_decode<T:Decodable<json::Decoder>>(s: &str) -> T {
references:-435: Work::from_value(json_decode(*res))
157: Some(v) => Some(json_decode(*v))
libextra/workcache.rs:241:1-241:1 -struct- definition:
struct Prep<'self> {
references:-349: impl<'self> Prep<'self> {
369: impl<'self> Prep<'self> {
350: fn new(ctxt: &'self Context, fn_name: &'self str) -> Prep<'self> {
308: pub fn prep<'a>(&'a self, fn_name: &'a str) -> Prep<'a> {
255: WorkFromTask(&'self Prep<'self>, PortOne<(Exec, T)>),
351: Prep {
312: pub fn with_prep<'a, T>(&'a self, fn_name: &'a str, blk: &fn(p: &mut Prep) -> T) -> T {
468: pub fn from_task(prep: &'self Prep<'self>, port: PortOne<(Exec, T)>)
libextra/workcache.rs:209:1-209:1 -struct- definition:
struct Logger {
references:-291: lg: RWArc<Logger>,
217: pub fn new() -> Logger {
297: lg: RWArc<Logger>,
231: logger: RWArc<Logger>,
215: impl Logger {
218: Logger { a: () }
libextra/workcache.rs:228:19-228:19 -struct- definition:
#[deriving(Clone)]
struct Context {
references:-299: freshness: Arc<FreshnessMap>) -> Context {
300: Context {
243: ctxt: &'self Context,
228: #[deriving(Clone)]
350: fn new(ctxt: &'self Context, fn_name: &'self str) -> Prep<'self> {
228: #[deriving(Clone)]
228: #[deriving(Clone)]
288: impl Context {
292: cfg: Arc<json::Object>) -> Context {
228: #[deriving(Clone)]
libextra/workcache.rs:257:1-257:1 -fn- definition:
fn json_encode<T:Encodable<json::Encoder>>(t: &T) -> ~str {
references:-154: let k = json_encode(&(fn_name, declared_inputs));
478: let s = json_encode(&v);
277: (*sha).input_str(json_encode(t));
168: let v = json_encode(&(discovered_inputs,
167: let k = json_encode(&(fn_name, declared_inputs));
libextra/workcache.rs:129:1-129:1 -struct- definition:
struct Database {
references:-230: db: RWArc<Database>,
138: pub fn new(p: Path) -> Database {
139: let mut rslt = Database {
290: pub fn new(db: RWArc<Database>,
136: impl Database {
202: impl Drop for Database {
296: pub fn new_with_freshness(db: RWArc<Database>,
libextra/workcache.rs:112:45-112:45 -struct- definition:
#[deriving(Clone, Eq, Encodable, Decodable)]
struct KindMap(TreeMap<~str, ~str>);
references:-112: #[deriving(Clone, Eq, Encodable, Decodable)]
112: #[deriving(Clone, Eq, Encodable, Decodable)]
112: #[deriving(Clone, Eq, Encodable, Decodable)]
112: #[deriving(Clone, Eq, Encodable, Decodable)]
112: #[deriving(Clone, Eq, Encodable, Decodable)]
112: #[deriving(Clone, Eq, Encodable, Decodable)]
112: #[deriving(Clone, Eq, Encodable, Decodable)]
110: struct WorkMap(TreeMap<~str, KindMap>);
112: #[deriving(Clone, Eq, Encodable, Decodable)]