Legend:
Page
Library
Module
Module type
Parameter
Class
Class type
Source
Source file local.ml
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501openStduneopenResult.OopenCache_intftypet={root:Path.t;build_root:Path.toption;info:User_message.Style.tPp.tlist->unit;warn:User_message.Style.tPp.tlist->unit;repositories:repositorylist;command_handler:command->unit;duplication_mode:Duplication_mode.t;temp_dir:Path.t}moduleTrimming_result=structtypet={trimmed_files_size:int;trimmed_files:Path.tlist;trimmed_metafiles:Path.tlist}letempty={trimmed_files_size=0;trimmed_files=[];trimmed_metafiles=[]}letaddt~size~file={twithtrimmed_files=file::t.trimmed_files;trimmed_files_size=size+t.trimmed_files_size}endletdefault_root()=Path.L.relative(Path.of_stringXdg.cache_dir)["dune";"db";"v2"](* Handling file digest collisions by appending suffices ".1", ".2", etc. to the
files stored in the cache.
To find a cache entry matching a given file, we try the suffices one after
another until we either (i) find a match and return [Found {existing_path}]
where the [existing_path] includes the correct suffix, or (ii) find a suffix
that is missing in the cache and return [Not_found {next_available_path}]
where the [next_available_path] includes the first available suffix.
CR-someday amokhov: In Dune we generally assume that digest collisions are
impossible, so it seems better to remove this logic in future. *)moduleCollision_chain=structtypesearch_result=|Foundof{existing_path:Path.t}|Not_foundof{next_available_path:Path.t}(* This function assumes that we do not create holes in the suffix numbering. *)letsearchpathfile=letrecloopn=letpath=Path.extend_basenamepath~suffix:("."^string_of_intn)inifPath.existspaththenifIo.compare_filespathfile=Ordering.EqthenFound{existing_path=path}elseloop(n+1)elseNot_found{next_available_path=path}inloop1end(* A file storage scheme. *)moduletypeFSScheme=sig(* Given a cache root and a file digest, determine the location of the file in
the cache. *)valpath:root:Path.t->Digest.t->Path.t(* Extract a file's digest from its location in the cache. *)valdigest:Path.t->Digest.t(* Given a cache root, list all files stored in the cache. *)vallist:root:Path.t->Path.tlistend(* A file storage scheme where a file with a digest [d] is stored in a
subdirectory whose name is made of the first two characters of [d], that is:
[<root>/<first-two-characters-of-d>/<d>.<N>]
The suffix [.<N>] is used to handle collisions, i.e. the (unlikely)
situations where two files have the same digest.
CR-soon amokhov: Note that the function [path] returns the path without the
[.<N>] suffix, whereas the function [digest] expects the [.<N>] suffix to be
present. We should fix this inconsistency. *)moduleFirstTwoCharsSubdir:FSScheme=structletpath~rootdigest=letdigest=Digest.to_stringdigestinletfirst_two_chars=String.subdigest~pos:0~len:2inPath.L.relativeroot[first_two_chars;digest]letdigestpath=matchDigest.from_hex(Path.basename(fst(Path.split_extensionpath)))with|Somedigest->digest|None->Code_error.raise"strange cached file path (not a valid digest)"[(Path.to_stringpath,Path.to_dynpath)]letlist~root=letfdir=letis_hex_charc=letchar_inse=Char.comparecs>=0&&Char.comparece<=0inchar_in'a''f'||char_in'0''9'androot=Path.L.relativeroot[dir]inifString.for_all~f:is_hex_chardirthenArray.map~f:(Path.relativeroot)(Sys.readdir(Path.to_stringroot))elseArray.of_list[]inArray.to_list(Array.concat(Array.to_list(Array.map~f(Sys.readdir(Path.to_stringroot)))))endmoduleFSSchemeImpl=FirstTwoCharsSubdirmoduleMetadata_file=structtypet={metadata:Sexp.tlist;files:File.tlist}letto_sexp{metadata;files}=letopenSexpinletf({in_the_build_directory;in_the_cache;_}:File.t)=Sexp.List[Sexp.Atom(Path.Local.to_string(Path.Build.localin_the_build_directory));Sexp.Atom(Path.to_stringin_the_cache)]inList[List(Atom"metadata"::metadata);List(Atom"files"::List.map~ffiles)]letof_sexp=function|Sexp.List[List(Atom"metadata"::metadata);List(Atom"files"::produced)]->let+files=Result.List.mapproduced~f:(function|List[Atomin_the_build_directory;Atomin_the_cache]->letin_the_build_directory=Path.Build.of_stringin_the_build_directoryandin_the_cache=Path.of_stringin_the_cacheinOk{File.in_the_cache;in_the_build_directory;digest=FSSchemeImpl.digestin_the_cache}|_->Error"invalid metadata scheme in produced files list")in{metadata;files}|_->Error"invalid metadata"letof_strings=matchCsexp.parse_stringswith|Oksexp->of_sexpsexp|Error(_,msg)->Errormsgletto_stringf=to_sexpf|>Csexp.to_stringletparsepath=Io.with_file_inpath~f:Csexp.input>>=of_sexpendletroot_datacache=Path.relativecache.root"files"letroot_metadatacache=Path.relativecache.root"meta"letpath_metadatacachekey=FSSchemeImpl.path~root:(root_metadatacache)keyletpath_datacachekey=FSSchemeImpl.path~root:(root_datacache)keyletmake_pathcachepath=matchcache.build_rootwith|Somep->Result.ok(Path.append_localppath)|None->Result.Error(sprintf"relative path %s while no build root was set"(Path.Local.to_string_maybe_quotedpath))letsearchcachedigestfile=Collision_chain.search(path_datacachedigest)fileletwith_repositoriescacherepositories={cachewithrepositories}letduplicate?(duplication=None)cache~src~dst=matchOption.value~default:cache.duplication_modeduplicationwith|Copy->Io.copy_file~src~dst()|Hardlink->Path.linksrcdstletretrievecache(file:File.t)=letpath=Path.buildfile.in_the_build_directoryincache.info[Pp.textf"retrieve %s from cache"(Path.to_string_maybe_quotedpath)];duplicatecache~src:file.in_the_cache~dst:path;pathletdeduplicatecache(file:File.t)=matchcache.duplication_modewith|Copy->()|Hardlink->(lettarget=Path.Build.to_stringfile.in_the_build_directoryinlettmpname=Path.Build.to_string(Path.Build.of_string".dedup")incache.info[Pp.textf"deduplicate %s from %s"target(Path.to_stringfile.in_the_cache)];letrmp=tryUnix.unlinkpwith_->()intryrmtmpname;Unix.link(Path.to_stringfile.in_the_cache)tmpname;Unix.renametmpnametargetwithUnix.Unix_error(e,syscall,_)->rmtmpname;cache.warn[Pp.textf"error handling dune-cache command: %s: %s"syscall(Unix.error_messagee)])letapply~fov=matchowith|Someo->fvo|None->vletpromote_synccachepathskeymetadata~repository~duplication=letopenResult.Oinlet*repo=matchrepositorywith|Someidx->(matchList.nthcache.repositoriesidxwith|None->Result.Error(Printf.sprintf"repository out of range: %i"idx)|repo->Result.Okrepo)|None->Result.OkNoneinletmetadata=apply~f:(funmetadatarepository->metadata@[Sexp.List[Sexp.Atom"repo";Sexp.Atomrepository.remote];Sexp.List[Sexp.Atom"commit_id";Sexp.Atomrepository.commit]])repometadatainletpromote(path,expected_digest)=let*abs_path=make_pathcache(Path.Build.localpath)incache.info[Pp.textf"promote %s"(Path.to_stringabs_path)];letstat=Unix.lstat(Path.to_stringabs_path)inlet*stat=ifstat.st_kind=S_REGthenResult.OkstatelseResult.Error(Format.sprintf"invalid file type: %s"(Path.string_of_file_kindstat.st_kind))in(* Create a duplicate (either a [Copy] or a [Hardlink] depending on the
[duplication] setting) of the promoted file in a temporary directory to
correctly handle the situation when the file is modified or deleted
during the promotion process. *)lettmp=letdst=Path.relativecache.temp_dir"data"inifPath.existsdstthenPath.unlinkdst;duplicate~duplicationcache~src:abs_path~dst;dstinleteffective_digest=Digest.file_with_statstmp(Path.stattmp)inifDigest.compareeffective_digestexpected_digest!=Ordering.Eqthen(letmessage=Printf.sprintf"digest mismatch: %s != %s"(Digest.to_stringeffective_digest)(Digest.to_stringexpected_digest)incache.info[Pp.textmessage];Result.Errormessage)elsematchsearchcacheeffective_digesttmpwith|Collision_chain.Found{existing_path}->(* We no longer need the temporary file. *)Path.unlinktmp;(* Update the timestamp of the existing cache entry, moving it to the
back of the trimming queue. *)Path.touchexisting_path;Result.Ok(Already_promoted{in_the_build_directory=path;in_the_cache=existing_path;digest=effective_digest})|Collision_chain.Not_found{next_available_path}->Path.mkdir_p(Path.parent_exnnext_available_path);letdest=Path.to_stringnext_available_pathin(* Move the temporary file to the cache. *)Unix.rename(Path.to_stringtmp)dest;(* Remove write permissions, making the cache entry immutable. We assume
that users do not modify the files in the cache. *)Unix.chmoddest(stat.st_permland0o555);Result.Ok(Promoted{in_the_build_directory=path;in_the_cache=next_available_path;digest=effective_digest})inlet+promoted=Result.List.map~f:promotepathsinletmetadata_path=path_metadatacachekeyandmetadata_tmp_path=Path.relativecache.temp_dir"metadata"andfiles=List.mappromoted~f:(function|Already_promotedf|Promotedf->f)inletmetadata_file:Metadata_file.t={metadata;files}inletmetadata=Csexp.to_string(Metadata_file.to_sexpmetadata_file)inIo.write_filemetadata_tmp_pathmetadata;let()=matchIo.read_filemetadata_pathwith|contents->ifcontents<>metadatathenUser_warning.emit[Pp.textf"non reproductible collision on rule %s"(Digest.to_stringkey)]|exceptionSys_error_->Path.mkdir_p(Path.parent_exnmetadata_path)inPath.renamemetadata_tmp_pathmetadata_path;(* The files that have already been present in the cache can be deduplicated,
i.e. replaced with hardlinks to their cached copies. *)(matchcache.duplication_modewith|Copy->()|Hardlink->List.iterpromoted~f:(function|Already_promotedfile->cache.command_handler(Dedupfile)|_->()));(metadata_file,promoted)letpromotecachepathskeymetadata~repository~duplication=Result.map~f:ignore(promote_synccachepathskeymetadata~repository~duplication)letsearchcachekey=letpath=path_metadatacachekeyinlet*sexp=tryIo.with_file_inpath~f:Csexp.inputwithSys_error_->Error"no cached file"inlet+metadata=Metadata_file.of_sexpsexpin(* Touch cache files so they are removed last by LRU trimming. *)let()=letf(file:File.t)=(* There is no point in trying to trim out files that are missing : dune
will have to check when hardlinking anyway since they could disappear
inbetween. *)tryPath.touch~create:falsefile.in_the_cachewithUnix.(Unix_error(ENOENT,_,_))->()inList.iter~fmetadata.filesin(metadata.metadata,metadata.files)letset_build_dircachep={cachewithbuild_root=Somep}letteardowncache=Path.rm_rf~allow_external:truecache.temp_dirletdetect_duplication_moderoot=let()=Path.mkdir_prootinletbeacon=Path.relativeroot"beacon"andtarget=Path.relativePath.build_dir".cache-beacon"inlet()=Path.touchbeaconinletrectest()=matchPath.linkbeacontargetwith|exceptionUnix.Unix_error(Unix.EEXIST,_,_)->Path.unlink_no_errtarget;test()|exceptionUnix.Unix_error_->Duplication_mode.Copy|()->Duplication_mode.Hardlinkintest()letmake?(root=default_root())?(duplication_mode=detect_duplication_moderoot)?(log=Dune_util.Log.info)?(warn=funpp->User_warning.emitpp)~command_handler()=ifPath.basenameroot<>"v2"thenResult.Error"unable to read dune-cache"elseletres={root;build_root=None;info=log;warn;repositories=[];command_handler;duplication_mode;temp_dir=Path.temp_dir~temp_dir:root"promoting."("."^string_of_int(Unix.getpid()))}inPath.mkdir_p@@root_metadatares;Path.mkdir_p@@root_datares;Result.okresletduplication_modecache=cache.duplication_modelettrimmablestats=stats.Unix.st_nlink=1let_garbage_collectdefault_trimcache=letroot=root_metadatacacheinletmetas=List.map~f:(funp->(p,Metadata_file.parsep))(FSSchemeImpl.list~root)inletfdefault_trim=function|p,Result.Errormsg->cache.warn[Pp.textf"remove invalid metadata file %s: %s"(Path.to_string_maybe_quotedp)msg];Path.unlink_no_errp;{default_trimwithTrimming_result.trimmed_metafiles=[p]}|p,Result.Ok{Metadata_file.files;_}->ifList.for_all~f:(fun{File.in_the_cache;_}->Path.existsin_the_cache)filesthendefault_trimelse(cache.info[Pp.textf"remove metadata file %s as some produced files are missing"(Path.to_string_maybe_quotedp)];letres=List.fold_left~init:default_trim~f:(funtrimf->letp=f.File.in_the_cacheintryletstats=Path.statpiniftrimmablestatsthen(Path.unlink_no_errp;Trimming_result.addtrim~file:p~size:stats.st_size)elsetrimwithUnix.Unix_error(Unix.ENOENT,_,_)->trim)filesinPath.unlink_no_errp;res)inList.fold_left~init:default_trim~fmetasletgarbage_collect=_garbage_collectTrimming_result.emptylettrimcachefree=letroot=root_datacacheinletfiles=FSSchemeImpl.list~rootinletfpath=letstats=Path.statpathiniftrimmablestatsthenSome(path,stats.st_size,stats.st_mtime)elseNoneandcompare(_,_,t1)(_,_,t2)=Ordering.of_int(Stdlib.comparet1t2)inletfiles=List.sort~compare(List.filter_map~ffiles)anddelete(trim:Trimming_result.t)(path,size,_)=iftrim.trimmed_files_size>=freethentrimelse(Path.unlinkpath;Trimming_result.addtrim~size~file:path)inlettrim=List.fold_left~init:Trimming_result.empty~f:deletefilesin_garbage_collecttrimcacheletoverhead_sizecache=letroot=root_datacacheinletfiles=FSSchemeImpl.list~rootinletstats=letfp=tryletstats=Path.statpiniftrimmablestatsthenstats.st_sizeelse0withUnix.Unix_error(Unix.ENOENT,_,_)->0inList.map~ffilesinList.fold_left~f:(funaccsize->acc+size)~init:0stats