Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Split Daml cron executable into separate modules #12449

Merged
merged 1 commit into from
Jan 18, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 151 additions & 0 deletions ci/cron/src/CheckReleases.hs
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
-- Copyright (c) 2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
-- SPDX-License-Identifier: Apache-2.0

module CheckReleases (check_releases) where

import Github

import qualified Control.Concurrent.Async
import qualified Control.Concurrent.QSem
import Control.Exception.Safe
import qualified Control.Monad as Control
import qualified Control.Monad.Extra
import Control.Retry
import Data.Conduit (runConduit, (.|))
import Data.Conduit.Combinators (sinkHandle)
import qualified Data.Foldable
import Data.Maybe (isJust)
import qualified Network.HTTP.Client as HTTP
import Network.HTTP.Client.Conduit (bodyReaderSource)
import qualified Network.HTTP.Client.TLS as TLS
import qualified Network.URI
import qualified System.Directory as Directory
import qualified System.Exit as Exit
import System.FilePath.Posix ((</>))
import qualified System.IO.Extra as IO
import qualified System.Process as System

-- | Run a command through the system shell and return its stdout.
-- Throws an 'IOError' if the command exits non-zero.
shell :: String -> IO String
shell = flip System.readCreateProcess "" . System.shell

-- | Like 'shell', but discard the captured output.
shell_ :: String -> IO ()
shell_ = Control.void . shell

-- | Download every asset attached to a GitHub release into @tmp@.
--
-- Downloads run concurrently, but a semaphore caps parallelism at 20
-- simultaneous downloads. Each asset is streamed to a file in @tmp@
-- named after the last path segment of its URL. Transient failures
-- (any 'IOException' or 'HTTP.HttpException') are retried with
-- exponential backoff for up to 5 minutes per asset.
download_assets :: FilePath -> GitHubRelease -> IO ()
download_assets tmp release = do
    manager <- HTTP.newManager TLS.tlsManagerSettings
    -- Semaphore limiting concurrency to 20 downloads at a time.
    tokens <- Control.Concurrent.QSem.newQSem 20
    Control.Concurrent.Async.forConcurrently_ (map uri $ assets release) $ \url ->
        -- bracket_ guarantees the semaphore token is returned even if the
        -- download throws.
        bracket_
          (Control.Concurrent.QSem.waitQSem tokens)
          (Control.Concurrent.QSem.signalQSem tokens)
          (do
              req <- add_github_contact_header <$> HTTP.parseRequest (show url)
              recovering
                retryPolicy
                [retryHandler]
                (\_ -> downloadFile req manager url)
          )
    where -- Retry for 5 minutes total, doubling delay starting with 20ms
          retryPolicy = limitRetriesByCumulativeDelay (5 * 60 * 1000 * 1000) (exponentialBackoff (20 * 1000))
          -- Retry on any IO or HTTP exception, logging each attempt to stderr.
          retryHandler status =
            logRetries
              (\e -> pure $ isJust (fromException @IOException e) || isJust (fromException @HTTP.HttpException e)) -- Don’t try to be clever, just retry
              (\shouldRetry err status -> IO.hPutStrLn IO.stderr $ defaultLogMsg shouldRetry err status)
              status
          -- Stream the response body straight to disk; the destination file
          -- name is the final path segment of the asset URL.
          downloadFile req manager url = HTTP.withResponse req manager $ \resp -> do
            IO.withBinaryFile (tmp </> (last $ Network.URI.pathSegments url)) IO.WriteMode $ \handle ->
              runConduit $ bodyReaderSource (HTTP.responseBody resp) .| sinkHandle handle

-- | Verify the GPG signature of every non-@.asc@ file in @tmp@.
--
-- Shells out to bash: for each file @f@ a companion @f.asc@ signature is
-- expected. A missing signature file is only reported; a signature that
-- fails @gpg_verify@ (a helper sourced from @bash_lib@) prints the full
-- gpg output and aborts the script with exit code 2, which makes
-- 'System.callCommand' throw.
verify_signatures :: FilePath -> FilePath -> String -> IO ()
verify_signatures bash_lib tmp version_tag = do
    System.callCommand $ unlines ["bash -c '",
        "set -euo pipefail",
        "source \"" <> bash_lib <> "\"",
        "shopt -s extglob", -- enable !() pattern: things that _don't_ match
        "cd \"" <> tmp <> "\"",
        "for f in !(*.asc); do",
        "p=" <> version_tag <> "/github/$f",
        "if ! test -f $f.asc; then",
        "echo $p: no signature file",
        "else",
        "LOG=$(mktemp)",
        "if gpg_verify $f.asc >$LOG 2>&1; then",
        "echo $p: signature matches",
        "else",
        "echo $p: signature does not match",
        "echo Full gpg output:",
        "cat $LOG",
        "exit 2",
        "fi",
        "fi",
        "done",
        "'"]

-- | Ask GCS whether @path@ exists, using the @gcs@ helper sourced from
-- @bash_lib@ and authenticating with the given credentials blob.
--
-- The bash snippet prints the literal string \"True\" or \"False\"; we
-- parse that output explicitly instead of using the partial 'read', so
-- unexpected output fails with a useful message instead of
-- \"Prelude.read: no parse\".
does_backup_exist :: String -> FilePath -> FilePath -> IO Bool
does_backup_exist gcp_credentials bash_lib path = do
    out <- shell $ unlines ["bash -c '",
        "set -euo pipefail",
        "source \"" <> bash_lib <> "\"",
        -- The credentials are passed through a heredoc so they never appear
        -- on a command line.
        "GCRED=$(cat <<END",
        gcp_credentials,
        "END",
        ")",
        "if gcs \"$GCRED\" ls \"" <> path <> "\" >/dev/null; then",
        "echo True",
        "else",
        "echo False",
        "fi",
        "'"]
    -- 'words' tolerates the trailing newline from the shell output.
    case words out of
      ["True"] -> return True
      ["False"] -> return False
      _ -> fail $ "does_backup_exist: unexpected output: " <> show out

-- | Copy @local_path@ to @remote_path@ with the @gcs@ helper sourced from
-- @bash_lib@, authenticating via a heredoc-embedded credentials blob.
--
-- NOTE(review): despite the parameter names, the caller in
-- 'check_releases' passes a @gs://@ URL as @local_path@ and a local file
-- as @remote_path@ — @gcs cp@ appears to accept either direction; the
-- names are just source and destination. Confirm and consider renaming.
gcs_cp :: String -> FilePath -> FilePath -> FilePath -> IO ()
gcs_cp gcp_credentials bash_lib local_path remote_path = do
    shell_ $ unlines ["bash -c '",
        "set -euo pipefail",
        "source \"" <> bash_lib <> "\"",
        "GCRED=$(cat <<END",
        gcp_credentials,
        "END",
        ")",
        "gcs \"$GCRED\" cp \"" <> local_path <> "\" \"" <> remote_path <> "\"",
        "'"]

-- | Compare two files with @diff@: 'True' when identical, 'False' when
-- they differ (diff exit code 1), and fail with diff's full output on any
-- other error.
check_files_match :: String -> String -> IO Bool
check_files_match f1 f2 = do
    (code, out, err) <- System.readProcessWithExitCode "diff" [f1, f2] ""
    case code of
      Exit.ExitFailure 1 -> return False
      Exit.ExitSuccess -> return True
      Exit.ExitFailure _ ->
        fail $ "Diff failed.\n" ++ "STDOUT:\n" ++ out ++ "\nSTDERR:\n" ++ err

-- | Verify GitHub releases against their GCS backups.
--
-- For every release (optionally limited to the @max_releases@ most recent
-- ones as returned by the GitHub API): download all assets, verify their
-- GPG signatures, and — when GCP credentials are supplied — fetch each
-- asset's GCS backup and check it matches byte-for-byte. A missing or
-- mismatched backup aborts via 'fail'. Each release check is retried as a
-- whole for up to 10 minutes.
check_releases :: Maybe String -> String -> Maybe Int -> IO ()
check_releases gcp_credentials bash_lib max_releases = do
    releases' <- fetch_gh_paginated "https://api.github.com/repos/digital-asset/daml/releases"
    -- Optionally restrict to the first n releases from the API.
    let releases = case max_releases of
                     Nothing -> releases'
                     Just n -> take n releases'
    Data.Foldable.for_ releases (\release -> recoverAll retryPolicy $ \_ -> do
        let v = show $ tag release
        putStrLn $ "Checking release " <> v <> " ..."
        IO.withTempDir $ \temp_dir -> do
            download_assets temp_dir release
            verify_signatures bash_lib temp_dir v
            -- Backup verification only runs when credentials were provided.
            Control.Monad.Extra.whenJust gcp_credentials $ \gcred -> do
                files <- Directory.listDirectory temp_dir
                Control.Concurrent.Async.forConcurrently_ files $ \f -> do
                    let local_github = temp_dir </> f
                    let local_gcp = temp_dir </> f <> ".gcp"
                    let remote_gcp = "gs://daml-data/releases/" <> v <> "/github/" <> f
                    exists <- does_backup_exist gcred bash_lib remote_gcp
                    if exists then do
                        -- Fetch the backup next to the GitHub copy and diff them.
                        gcs_cp gcred bash_lib remote_gcp local_gcp
                        check_files_match local_github local_gcp >>= \case
                            True -> putStrLn $ f <> " matches GCS backup."
                            False -> fail $ f <> " does not match GCS backup."
                    else do
                        fail $ remote_gcp <> " does not exist. Aborting.")
    where
      -- Retry for 10 minutes total, delay of 1s
      retryPolicy = limitRetriesByCumulativeDelay (10 * 60 * 1000 * 1000) (constantDelay 1000_000)

220 changes: 220 additions & 0 deletions ci/cron/src/Docs.hs
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
-- Copyright (c) 2022 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
-- SPDX-License-Identifier: Apache-2.0

module Docs (docs, sdkDocOpts, damlOnSqlDocOpts) where

import Control.Exception.Safe
import qualified Control.Monad as Control
import qualified Control.Monad.Extra
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Lazy.UTF8 as LBS
import qualified Data.Foldable
import Data.Function ((&))
import qualified Data.HashMap.Strict as H
import qualified Data.List
import Data.Maybe (fromMaybe)
import qualified Data.Ord
import qualified Data.Set as Set
import qualified System.Directory as Directory
import qualified System.Environment
import qualified System.Exit as Exit
import System.FilePath.Posix ((</>))
import qualified System.IO.Extra as IO
import qualified System.Process as System

import Github

-- | Abort with a readable report of a failed subprocess: the command,
-- its exit code, and both of its output streams.
die :: String -> Int -> String -> String -> IO a
die cmd exit out err = fail report
  where
    report = unlines
      [ "Subprocess:"
      , cmd
      , "failed with exit code " <> show exit <> "; output:"
      , "---"
      , out
      , "---"
      , "err:"
      , "---"
      , err
      , "---"
      ]

-- | Run @cmd@ via the system shell and return its stdout; throws an
-- 'IOError' on a non-zero exit code.
shell :: String -> IO String
shell command = System.readCreateProcess (System.shell command) ""

-- | Run a command given argv-style (no shell interpretation) and return
-- its stdout; throws an 'IOError' on a non-zero exit code.
--
-- The original used the partial 'head'/'tail', which crashes with an
-- unhelpful \"Prelude.head: empty list\" when given @[]@; pattern
-- matching gives a clear error message instead.
proc :: [String] -> IO String
proc [] = fail "proc: empty command line"
proc (cmd : args) = System.readCreateProcess (System.proc cmd args) ""

-- | Run a shell command, discarding its output.
shell_ :: String -> IO ()
shell_ = Control.void . shell

-- | Run an argv-style command, discarding its output.
proc_ :: [String] -> IO ()
proc_ args = do
    _ <- proc args
    return ()

-- | Run a shell command with extra environment variables appended to the
-- current process environment, discarding the output.
shell_env_ :: [(String, String)] -> String -> IO ()
shell_env_ env cmd = do
    inherited <- System.Environment.getEnvironment
    let process = (System.shell cmd) {System.env = Just (inherited ++ env)}
    Control.void $ System.readCreateProcess process ""

-- | Run the nix-build for the CI tooling, retrying up to 10 times when
-- the failure looks like a flaky download (\"unexpected end-of-file\" in
-- stderr). Any other failure, or exhausting the retry budget, aborts via
-- 'die'.
robustly_download_nix_packages :: IO ()
robustly_download_nix_packages = do
    h (10 :: Integer)
  where
    cmd = "nix-build nix -A tools -A ci-cached --no-out-link"
    h n = do
        (exit, out, err) <- System.readCreateProcessWithExitCode (System.shell cmd) ""
        case (exit, n) of
          (Exit.ExitSuccess, _) -> return ()
          -- Retry budget exhausted: report the failure whatever its cause.
          (Exit.ExitFailure exit, 0) -> die cmd exit out err
          -- NOTE: this guarded catch-all is matched before the failure case
          -- below, so a truncated-download error triggers a retry while any
          -- other non-zero exit falls through to 'die'.
          _ | "unexpected end-of-file" `Data.List.isInfixOf` err -> h (n - 1)
          (Exit.ExitFailure exit, _) -> die cmd exit out err

-- | Absolute S3 location for @file@, under the docs bucket and the
-- product's optional subdirectory.
s3Path :: DocOptions -> FilePath -> String
s3Path DocOptions{s3Subdir} file = bucket </> subdir </> file
  where
    bucket = "s3://docs-daml-com"
    subdir = fromMaybe "" s3Subdir

-- | Build the docs for each version and upload each to S3 as a versioned
-- subfolder. The current git ref is recorded up front and restored
-- afterwards (even on failure), since each build checks out the
-- version's tag.
build_and_push :: DocOptions -> FilePath -> [Version] -> IO ()
build_and_push opts@DocOptions{build} temp versions = do
    restore_sha $ do
        Data.Foldable.for_ versions (\version -> do
            putStrLn $ "Building " <> show version <> "..."
            build temp version
            putStrLn $ "Pushing " <> show version <> " to S3 (as subfolder)..."
            push version
            putStrLn "Done.")
  where
    -- Remember the current branch name (or detached-HEAD sha) and check
    -- it back out when done. 'takeWhile (/= '\n')' strips the trailing
    -- newline from the command output; unlike the partial 'init' it is
    -- total and cannot crash if the output were ever empty.
    restore_sha io =
        bracket (takeWhile (/= '\n') <$> shell "git symbolic-ref --short HEAD 2>/dev/null || git rev-parse HEAD")
                (\cur_sha -> proc_ ["git", "checkout", cur_sha])
                (const io)
    -- Upload the built docs for one version, world-readable.
    push version =
        proc_ ["aws", "s3", "cp",
               temp </> show version,
               s3Path opts (show version),
               "--recursive", "--acl", "public-read"]

-- | Ensure a local copy of version @v@'s docs exists under @temp@,
-- downloading it from S3 when absent.
fetch_if_missing :: DocOptions -> FilePath -> Version -> IO ()
fetch_if_missing opts temp v = do
    present <- Directory.doesDirectoryExist (temp </> show v)
    if present
      then putStrLn $ show v <> " already present."
      else do
        putStrLn $ "Downloading " <> show v <> "..."
        proc_ ["aws", "s3", "cp", s3Path opts (show v), temp </> show v, "--recursive"]
        putStrLn "Done."

-- | Regenerate the version-listing files on S3: @versions.json@ (the
-- dropdown versions), @snapshots.json@ (every other known version, newest
-- first), and @latest@ (the top version, when one exists).
update_s3 :: DocOptions -> FilePath -> Versions -> IO ()
update_s3 opts temp vs = do
    let displayed = dropdown vs
    -- Hidden versions = all versions minus the dropdown ones, newest first.
    let hidden = Data.List.sortOn Data.Ord.Down $ Set.toList $ all_versions vs `Set.difference` Set.fromList displayed
    -- The assistant depends on these three files, they are not just internal
    -- to the docs process.
    push (versions_json displayed) "versions.json"
    push (versions_json hidden) "snapshots.json"
    Control.Monad.Extra.whenJust (top vs) $ \latest -> push (show latest) "latest"
    putStrLn "Done."
  where
    -- Not going through Aeson because it represents JSON objects as
    -- unordered maps, and here order matters.
    -- Produces {"v": "v", ...} with one entry per version, in list order.
    versions_json vs = vs
                       & map ((\s -> "\"" <> s <> "\": \"" <> s <> "\"") . show)
                       & Data.List.intercalate ", "
                       & \s -> "{" <> s <> "}"
    -- Write the content to a local file, then copy it to S3 world-readable.
    push text name = do
        writeFile (temp </> name) text
        proc_ ["aws", "s3", "cp", temp </> name, s3Path opts name, "--acl", "public-read"]

-- | Promote version @new@ to the top level of the docs site: first remove
-- any top-level entries that existed only in the previous top version
-- (@mayOld@), then copy the new version's files over the site root.
update_top_level :: DocOptions -> FilePath -> Version -> Maybe Version -> IO ()
update_top_level opts temp new mayOld = do
    new_files <- Set.fromList <$> Directory.listDirectory (temp </> show new)
    old_files <- case mayOld of
        Nothing -> pure Set.empty
        Just old -> Set.fromList <$> Directory.listDirectory (temp </> show old)
    -- Entries the old top-level version had but the new one doesn't would
    -- otherwise linger at the site root.
    let to_delete = Set.toList $ old_files `Set.difference` new_files
    Control.when (not $ null to_delete) $ do
        putStrLn $ "Deleting top-level files: " <> show to_delete
        Data.Foldable.for_ to_delete (\f -> do
            proc_ ["aws", "s3", "rm", s3Path opts f, "--recursive"])
        putStrLn "Done."
    putStrLn $ "Pushing " <> show new <> " to top-level..."
    -- Trailing slash so the copy lands at the bucket/subdir root.
    let path = s3Path opts "" <> "/"
    proc_ ["aws", "s3", "cp", temp </> show new, path, "--recursive", "--acl", "public-read"]
    putStrLn "Done."

-- | Invalidate the entire CloudFront distribution so the freshly pushed
-- docs are served immediately.
reset_cloudfront :: IO ()
reset_cloudfront =
    putStrLn "Refreshing CloudFront cache..."
    >> shell_ "aws cloudfront create-invalidation --distribution-id E1U753I56ERH55 --paths '/*'"

-- | Read the currently-published versions back from S3, combining the
-- dropdown versions (@versions.json@) with the hidden snapshot versions
-- (@snapshots.json@).
fetch_s3_versions :: DocOptions -> IO Versions
fetch_s3_versions opts = do
    -- On the first run, this will fail so treat it like an empty file.
    -- NOTE(review): this catches *any* IO failure (credentials, network),
    -- not just a missing file — confirm that silently treating those as
    -- "no versions" is intended.
    dropdown <- fetch "versions.json" False `catchIO` (\_ -> pure [])
    hidden <- fetch "snapshots.json" True `catchIO` (\_ -> pure [])
    return $ versions $ dropdown <> hidden
    where -- Download one listing file and turn its keys into GitHubRelease
          -- values; the JSON maps version -> version, so only keys matter.
          -- NOTE(review): 'shell' keeps the trailing newline, so @temp@
          -- literally ends in '\n'; it works because both uses share the
          -- same string, but consider trimming it.
          fetch file prerelease = do
            temp <- shell "mktemp"
            proc_ ["aws", "s3", "cp", s3Path opts file, temp]
            s3_raw <- proc ["cat", temp]
            let type_annotated_value :: Maybe JSON.Object
                type_annotated_value = JSON.decode $ LBS.fromString s3_raw
            case type_annotated_value of
              Just s3_json -> return $ map (\s -> GitHubRelease prerelease (version s) []) $ H.keys s3_json
              Nothing -> Exit.die "Failed to get versions from s3"

-- | Options describing one documentation product (SDK docs or
-- Daml-on-SQL docs): where it lives on S3, which versions to include,
-- and how to build it.
data DocOptions = DocOptions
  { s3Subdir :: Maybe FilePath
    -- ^ Subdirectory under the docs bucket; 'Nothing' means the bucket root.
  , includedVersion :: Version -> Bool
    -- ^ Which versions to build docs for.
    -- Exclusive minimum version bound for which we build docs
  , build :: FilePath -> Version -> IO ()
    -- ^ Build one version's docs into the given temp directory.
  }

-- | Documentation options for the main SDK docs, published at the
-- bucket root.
sdkDocOpts :: DocOptions
sdkDocOpts = DocOptions
  { s3Subdir = Nothing
  -- versions prior to 0.13.10 cannot be built anymore and are not present in
  -- the repo.
  , includedVersion = (>= version "0.13.10")
  , build = \temp v -> do
      -- Check out the release tag, build the docs with bazel, and unpack
      -- the resulting tarball into a per-version directory.
      proc_ ["git", "checkout", "v" <> show v]
      robustly_download_nix_packages
      shell_env_ [("DAML_SDK_RELEASE_VERSION", show v)] "bazel build //docs:docs"
      proc_ ["mkdir", "-p", temp </> show v]
      proc_ ["tar", "xzf", "bazel-bin/docs/html.tar.gz", "--strip-components=1", "-C", temp </> show v]
  }

-- | Documentation options for the Daml-on-SQL (PostgreSQL driver) docs,
-- published under their own subdirectory.
damlOnSqlDocOpts :: DocOptions
damlOnSqlDocOpts = DocOptions
  { s3Subdir = Just "daml-driver-for-postgresql"
  , includedVersion = (> version "1.8.0-snapshot.20201201.5776.0.4b91f2a6")
  , build = \temp v -> do
      -- Same build recipe as the SDK docs, but for the daml-on-sql target.
      proc_ ["git", "checkout", "v" <> show v]
      robustly_download_nix_packages
      shell_env_ [("DAML_SDK_RELEASE_VERSION", show v)] "bazel build //ledger/daml-on-sql:docs"
      proc_ ["mkdir", "-p", temp </> show v]
      proc_ ["tar", "xzf", "bazel-bin/ledger/daml-on-sql/html.tar.gz", "--strip-components=1", "-C", temp </> show v]
  }

-- | Docs-cron entry point for one product: compare the versions published
-- on S3 with the releases on GitHub; build and push any missing versions,
-- replace the top-level docs when the latest version changed, regenerate
-- the version-listing files, and invalidate the CloudFront cache.
docs :: DocOptions -> IO ()
docs opts@DocOptions{includedVersion} = do
    putStrLn "Checking for new version..."
    gh_versions <- fetch_gh_versions includedVersion
    s3_versions <- fetch_s3_versions opts
    if s3_versions == gh_versions
        then do
            putStrLn "Versions match, nothing to do."
        else do
            -- We may have added versions. We need to build and push them.
            let added = Set.toList $ all_versions gh_versions `Set.difference` all_versions s3_versions
            IO.withTempDir $ \temp_dir -> do
                putStrLn $ "Versions to build: " <> show added
                build_and_push opts temp_dir added
                -- If there is no version on GH, we don’t have to do anything.
                Control.Monad.Extra.whenJust (top gh_versions) $ \gh_top ->
                    Control.when (Just gh_top /= top s3_versions) $ do
                        putStrLn $ "Updating top-level version from " <> (show $ top s3_versions) <> " to " <> (show $ top gh_versions)
                        -- Both versions need local copies: the new one to push
                        -- to the site root, the old one to diff top-level
                        -- entries against in update_top_level.
                        fetch_if_missing opts temp_dir gh_top
                        Control.Monad.Extra.whenJust (top s3_versions) (fetch_if_missing opts temp_dir)
                        update_top_level opts temp_dir gh_top (top s3_versions)
                putStrLn "Updating versions.json, snapshots.json, latest..."
                update_s3 opts temp_dir gh_versions
                reset_cloudfront
Loading