@@ -208,19 +208,8 @@ impl From<Select> for NodeKey {
   }
 }
 
-pub fn lift_directory_digest(types: &Types, digest: &Value) -> Result<hashing::Digest, String> {
-  if types.directory_digest != externs::get_type_for(digest) {
-    return Err(format!(
-      "{} is not of type {}.",
-      digest, types.directory_digest
-    ));
-  }
-  let fingerprint = externs::getattr_as_string(&digest, "fingerprint");
-  let digest_length: usize = externs::getattr(&digest, "serialized_bytes_length").unwrap();
-  Ok(hashing::Digest(
-    hashing::Fingerprint::from_hex_string(&fingerprint)?,
-    digest_length,
-  ))
+pub fn lift_directory_digest(digest: &Value) -> Result<hashing::Digest, String> {
+  externs::fs::from_py_digest(digest).map_err(|e| format!("{:?}", e))
 }
 
 pub fn lift_file_digest(types: &Types, digest: &Value) -> Result<hashing::Digest, String> {
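
Note: the removed inline body is presumably what externs::fs::from_py_digest now encapsulates; its actual implementation is not shown in this diff, and the map_err(|e| format!("{:?}", e)) suggests it returns a non-String error type. A minimal sketch of such a helper, assuming it still reads the same "fingerprint" and "serialized_bytes_length" attributes via the externs getattr helpers used above (from_py_digest_sketch is a hypothetical name, not the real function):

  // Sketch only -- not the actual externs::fs::from_py_digest implementation.
  fn from_py_digest_sketch(digest: &Value) -> Result<hashing::Digest, String> {
    // Read the same attributes the removed inline code read.
    let fingerprint = externs::getattr_as_string(&digest, "fingerprint");
    let digest_length: usize = externs::getattr(&digest, "serialized_bytes_length").unwrap();
    Ok(hashing::Digest(
      hashing::Fingerprint::from_hex_string(&fingerprint)?,
      digest_length,
    ))
  }
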
@@ -244,11 +233,7 @@ pub struct MultiPlatformExecuteProcess {
 }
 
 impl MultiPlatformExecuteProcess {
-  fn lift_process(
-    types: &Types,
-    value: &Value,
-    platform_constraint: Option<Platform>,
-  ) -> Result<Process, String> {
+  fn lift_process(value: &Value, platform_constraint: Option<Platform>) -> Result<Process, String> {
     let env = externs::getattr_from_frozendict(&value, "env");
 
     let working_directory = {
@@ -261,8 +246,8 @@ impl MultiPlatformExecuteProcess {
     };
 
     let py_digest: Value = externs::getattr(&value, "input_digest").unwrap();
-    let digest = lift_directory_digest(types, &py_digest)
-      .map_err(|err| format!("Error parsing digest {}", err))?;
+    let digest =
+      lift_directory_digest(&py_digest).map_err(|err| format!("Error parsing digest {}", err))?;
 
     let output_files = externs::getattr::<Vec<String>>(&value, "output_files")
       .unwrap()
@@ -336,7 +321,7 @@ impl MultiPlatformExecuteProcess {
     })
   }
 
-  pub fn lift(types: &Types, value: &Value) -> Result<MultiPlatformExecuteProcess, String> {
+  pub fn lift(value: &Value) -> Result<MultiPlatformExecuteProcess, String> {
     let raw_constraints = externs::getattr::<Vec<Option<String>>>(&value, "platform_constraints")?;
     let constraints = raw_constraints
       .into_iter()
@@ -356,8 +341,7 @@ impl MultiPlatformExecuteProcess {
 
     let mut request_by_constraint: BTreeMap<Option<Platform>, Process> = BTreeMap::new();
     for (constraint, execute_process) in constraints.iter().zip(processes.iter()) {
-      let underlying_req =
-        MultiPlatformExecuteProcess::lift_process(types, execute_process, *constraint)?;
+      let underlying_req = MultiPlatformExecuteProcess::lift_process(execute_process, *constraint)?;
       request_by_constraint.insert(*constraint, underlying_req.clone());
     }
 
@@ -640,7 +624,7 @@ impl Snapshot {
   }
 
   pub fn store_directory_digest(item: &hashing::Digest) -> Result<Value, String> {
-    externs::fs::new_py_digest(*item)
+    externs::fs::to_py_digest(*item)
       .map(|d| d.into_object().into())
       .map_err(|e| format!("{:?}", e))
   }
@@ -656,7 +640,7 @@ impl Snapshot {
   }
 
   pub fn store_snapshot(item: store::Snapshot) -> Result<Value, String> {
-    externs::fs::new_py_snapshot(item)
+    externs::fs::to_py_snapshot(item)
       .map(|d| d.into_object().into())
       .map_err(|e| format!("{:?}", e))
   }
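
With this change the digest helpers are symmetric around externs::fs: Snapshot::store_directory_digest converts a Rust hashing::Digest into a Python value via to_py_digest, and lift_directory_digest converts it back via from_py_digest. A usage sketch of the intended round trip, assuming hashing::Digest implements PartialEq (digest_roundtrip is a hypothetical helper, not part of this change):

  // Hypothetical round-trip check: a Digest stored into a Python value
  // should lift back to the same Rust Digest.
  fn digest_roundtrip(digest: hashing::Digest) -> Result<(), String> {
    let py_digest: Value = Snapshot::store_directory_digest(&digest)?;
    let lifted: hashing::Digest = lift_directory_digest(&py_digest)?;
    assert_eq!(digest, lifted);
    Ok(())
  }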