-
Notifications
You must be signed in to change notification settings - Fork 592
Expand file tree
/
Copy pathbootstrap.sh
More file actions
executable file
·314 lines (283 loc) · 12.5 KB
/
bootstrap.sh
File metadata and controls
executable file
·314 lines (283 loc) · 12.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
#!/usr/bin/env bash
# TODO: THIS SCRIPT SHOULD NOW BE ABLE TO REPLACE TRANSPILATION AND VK GENERATION WITH 'bb aztec_process'.
#
# Some notes if you have to work on this script.
# - First of all, I'm sorry (edit: not sorry). It's a beautiful script but it's no fun to debug. I got carried away.
# - You can enable BUILD_SYSTEM_DEBUG=1 but the output is so verbose that it's not of much use by default.
# - This flag however, isn't carried into exported functions. You need to do "set -x" in those functions manually.
# - You can call ./bootstrap.sh compile <contract names> to compile and process select contracts.
# - You can disable further parallelism by passing 1 as arg to 'parallelize' and with PARALLELISM=1.
# - The exported functions called by parallel must enable their own flags at the start e.g. set -euo pipefail
# - The exported functions are using stdin/stdout, so be very careful about what's printed where.
# - The exported functions need to have external variables they require, to have been exported first.
# - You can't export bash arrays or maps to be used by external functions, only strings.
# - If you want to echo something, send it to stderr e.g. echo_stderr "My debug"
# - If you call another script, be sure it also doesn't output something you don't want.
# - Local assignments with sub-shells don't propagate errors e.g. local capture=$(false). Declare locals separately.
# - Just ask me (charlie) for guidance if you're suffering.
# - I remain convinced we don't need node for these kinds of things, and we can be more performant/expressive with bash.
# - We could perhaps make it less tricky to work with by leveraging more tempfiles and less stdin/stdout.
# Pull in the shared CI helpers (echo_stderr, cache_*, hash_str, memsuspend_limit, $cmd, ...).
# NOTE(review): helper definitions assumed to come from ci3/source_bootstrap — confirm there.
source $(git rev-parse --show-toplevel)/ci3/source_bootstrap
# entrypoint for docs
if [ -n "${DOCS_WORKING_DIR:-}" ]; then
  cd "$DOCS_WORKING_DIR"
fi
# Cap per-process thread usage so parallel contract builds don't oversubscribe the machine.
export RAYON_NUM_THREADS=${RAYON_NUM_THREADS:-16}
export HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16}
export PLATFORM_TAG=any
# Tool locations, overridable from the environment; defaults point at sibling repo builds.
export BB=${BB:-../../barretenberg/cpp/build/bin/bb}
export NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo}
export TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpiler}
export STRIP_AZTEC_NR_PREFIX=${STRIP_AZTEC_NR_PREFIX:-./scripts/strip_aztec_nr_prefix.sh}
# Toolchain content hashes; these feed the cache keys so artifacts rebuild when the toolchain changes.
export BB_HASH=${BB_HASH:-$(../../barretenberg/cpp/bootstrap.sh hash)}
export NOIR_HASH=${NOIR_HASH:-$(../../noir/bootstrap.sh hash)}
export tmp_dir=./target/tmp
# Remove our tmp dir from last run.
# Note: This can use BASH 'trap' for better cleanliness, but the script has been hitting edge-cases so is (temporarily?) simplified.
rm -rf $tmp_dir
mkdir -p $tmp_dir
# Set common flags for parallel.
export PARALLEL_FLAGS="-j${PARALLELISM:-16} --halt now,fail=1 --memsuspend $(memsuspend_limit)"
# This computes a vk and adds it to the input function json if it's private, else returns same input.
# stdin has the function json.
# stdout receives the function json with the vk added (if private).
# $1 is the contract hash, used to segregate cached vks per contract.
# The function is exported and called by a sub-shell in parallel, so we must "set -eu" etc..
# If debugging, a set -x at the start can help.
function process_function {
  set -euo pipefail
  local func name bytecode_b64 hash vk contract_hash make_vk
  contract_hash=$1
  # Read the function json from stdin.
  func="$(cat)"
  name=$(echo "$func" | jq -r '.name')
  echo_stderr "Processing function: $name..."
  # A vk is only needed when the function is neither public nor unconstrained (i.e. it's private).
  # TODO: Why do we need to gen keys for functions that are not marked private?
  # We allow the jq call to error (set +e) because it returns an error code if the result is false.
  # We then differentiate between a real error, and the result being false.
  set +e
  make_vk=$(echo "$func" | jq -e '(.custom_attributes | index("public") == null) and (.is_unconstrained == false)')
  if [ $? -ne 0 ] && [ "$make_vk" != "false" ]; then
    echo_stderr "Failed to check function $name is neither public nor unconstrained."
    exit 1
  fi
  set -e
  if [ "$make_vk" == "true" ]; then
    # It's a private function.
    # Build a hash over the bb toolchain hash + bytecode, check the vk cache.
    # NOTE(review): on a cache hit the vk appears to be extracted to $tmp_dir/$contract_hash/$hash
    # (see the read below) — confirm against cache_download's extraction path.
    bytecode_b64=$(echo "$func" | jq -r '.bytecode')
    hash=$((echo "$BB_HASH"; echo "$bytecode_b64") | sha256sum | tr -d ' -')
    if ! cache_download vk-$contract_hash-$hash.tar.gz >&2; then
      # It's not in the cache. Generate the vk file and upload it to the cache.
      echo_stderr "Generating vk for function: $name..."
      # Declare separately from assignment so a mktemp failure isn't masked by 'local' (SC2155).
      local outdir
      outdir=$(mktemp -d -p $tmp_dir)
      echo "$bytecode_b64" | base64 -d | gunzip | $BB write_vk --scheme chonk -b - -o $outdir -v
      mv $outdir/vk $tmp_dir/$contract_hash/$hash
      cache_upload vk-$contract_hash-$hash.tar.gz $tmp_dir/$contract_hash/$hash
    fi
    # Return (echo) json containing the base64 encoded verification key.
    vk=$(cat $tmp_dir/$contract_hash/$hash | base64 -w 0)
    echo "$func" | jq -c --arg vk "$vk" '. + {verification_key: $vk}'
  else
    # Not a private function: it IS public or unconstrained, so no vk. Return the original json.
    # BUGFIX: the previous log message here had the condition inverted.
    echo_stderr "Function $name is public or unconstrained, skipping vk generation."
    echo "$func"
  fi
}
export -f process_function
# Compute the content hash for a given contract; used as the artifact cache key.
# $1 is the contract name, $2 is the folder name (e.g. "contracts" or "examples")
# Note the differing ../ depths: the docs flow runs from a different working directory.
function get_contract_hash {
  # Declare separately from assignment so a get_contract_path failure isn't
  # masked by the 'local' builtin's own (successful) exit status (SC2155).
  local contract_path
  contract_path=$(get_contract_path "$1" "$2")
  if [ "$2" = "examples" ]; then
    # Called from docs
    hash_str \
      $NOIR_HASH \
      $(cache_content_hash \
        ../avm-transpiler/.rebuild_patterns \
        ../barretenberg/cpp/.rebuild_patterns \
        ../barretenberg/ts/.rebuild_patterns \
        "^docs/examples/$contract_path/" \
        "^noir-projects/aztec-nr/" \
        "^noir-projects/noir-protocol-circuits/crates/types/")
  else
    # Called from noir-contracts
    hash_str \
      $NOIR_HASH \
      $(cache_content_hash \
        ../../avm-transpiler/.rebuild_patterns \
        ../../barretenberg/cpp/.rebuild_patterns \
        ../../barretenberg/ts/.rebuild_patterns \
        "^noir-projects/noir-contracts/contracts/$contract_path/" \
        "^noir-projects/noir-contracts/contracts/protocol/aztec_sublib/" \
        "^noir-projects/aztec-nr/" \
        "^noir-projects/noir-protocol-circuits/crates/types/")
  fi
}
export -f get_contract_hash
# Extract contract path from Nargo.toml based on argument.
# Handles both formats: full path relative to contracts/ or just the contract name.
# E.g. for both "ecdsa_k_account_contract" and "account/ecdsa_k_account_contract" returns
# "account/ecdsa_k_account_contract".
#
# $1 is the contract input, $2 is the folder name (e.g. "contracts" or "examples")
# This is done to ensure that both paths can be provided as inputs to the script.
# Prints the resolved path to stdout; exits 1 if the contract isn't listed in Nargo.toml.
function get_contract_path {
  local input=$1
  local folder_name=$2
  local contract_path
  if [[ $input == *"/"* ]]; then
    # Full path provided (e.g. account/ecdsa_k_account_contract)
    contract_path=$input
  else
    # Just contract name provided (e.g. ecdsa_k_account_contract).
    # BUGFIX: guard the grep with '|| true'. This function runs in $( ) subshells that inherit
    # errexit from their callers, so a no-match grep (exit 1) previously aborted the subshell
    # before the helpful error below could ever print.
    contract_path=$(grep -oP "(?<=$folder_name/)[^\"]+/$input" Nargo.toml || true)
    if [ -z "$contract_path" ]; then
      echo "Contract $input not found in Nargo.toml" >&2
      exit 1
    fi
  fi
  echo "$contract_path"
}
export -f get_contract_path
# This compiles a noir contract, transpiles public functions, and generates vks for private functions.
# $1 is the input package name, $2 is the folder name (e.g. "contracts" or "examples")
# On exit its fully processed json artifact is in the target dir.
# The function is exported and called by a sub-shell in parallel, so we must "set -eu" etc..
function compile {
  set -euo pipefail
  local contract_name contract_hash contract contract_path
  # Assign separately from 'local' so command substitution failures aren't masked (SC2155).
  contract_path=$(get_contract_path "$1" "$2")
  contract=$(grep -oP '(?<=^name = ")[^"]+' "$2/$contract_path/Nargo.toml")
  # Calculate filename because nargo...
  contract_name=$(cat $2/$contract_path/src/main.nr | awk '/^contract / { print $2 } /^pub contract / { print $3 }')
  local filename="$contract-$contract_name.json"
  local json_path="./target/$filename"
  contract_hash=$(get_contract_hash "$1" "$2")
  if ! cache_download contract-$contract_hash.tar.gz; then
    # Cache miss: compile, transpile public functions, strip the aztec-nr prefix, then upload.
    $NARGO compile --package $contract --inliner-aggressiveness 0 --deny-warnings
    $TRANSPILER $json_path $json_path
    $STRIP_AZTEC_NR_PREFIX $json_path
    cache_upload contract-$contract_hash.tar.gz $json_path
  fi
  # We segregate equivalent vk's created by process_function. This was done to narrow down potential edge cases with identical VKs
  # reading from cache at the same time. Create this folder up-front.
  mkdir -p $tmp_dir/$contract_hash
  # Write each function to a separate temp file to avoid pipe/stdin issues with large JSON.
  local func_dir
  func_dir=$(mktemp -d -p $tmp_dir)
  local i=0
  while IFS= read -r func_json; do
    echo "$func_json" > "$func_dir/$i.json"
    ((i++)) || true
  done < <(jq -c '.functions[]' $json_path)
  # Process each function file in parallel (order preserved by --keep-order + sort -V).
  # The returned jsons from process_function are converted back to a json array by the first jq -s.
  # When slurping (-s) in the last jq, we get an array of two elements:
  #   .[0] is the original json (at $json_path)
  #   .[1] is the updated functions on stdin (-)
  # * merges their fields.
  ls "$func_dir"/*.json | sort -V | \
    parallel $PARALLEL_FLAGS --keep-order 'cat {} | process_function '"$contract_hash" | \
    jq -s '{functions: .}' | jq -s '.[0] * {functions: .[1].functions}' $json_path - > $tmp_dir/$filename
  mv $tmp_dir/$filename $json_path
}
export -f compile
# If given arguments, they are the contracts to compile.
# Otherwise parse out all relevant contracts from the root Nargo.toml and process them in parallel.
function build {
  echo_stderr "Compiling contracts (bb-hash: $BB_HASH)..."
  local folder_name code
  local -a contracts
  if [ -n "${DOCS_WORKING_DIR:-}" ]; then
    folder_name="examples"
  else
    folder_name="contracts"
  fi
  if [ "$#" -eq 0 ]; then
    rm -rf target
    mkdir -p $tmp_dir
    # One contract path per line from Nargo.toml, read into a proper array.
    mapfile -t contracts < <(grep -oP "(?<=$folder_name/)[^\"]+" Nargo.toml)
  else
    # BUGFIX(SC2124): previously '$@' was flattened into a scalar and re-split by IFS;
    # keep the arguments as distinct array elements instead.
    contracts=("$@")
  fi
  # Disable errexit around parallel so we can capture its exit code and still dump the joblog.
  set +e
  parallel $PARALLEL_FLAGS --joblog joblog.txt -v --line-buffer --tag compile {} $folder_name ::: "${contracts[@]}"
  code=$?
  # Restore errexit (previously leaked 'set +e' to the rest of the script).
  set -e
  cat joblog.txt || true
  return $code
}
# Emit one test command per line to stdout for the test runner.
# Each line is "<cache-key> <command...>"; the leading token lets the runner skip tests
# whose inputs haven't changed ("disabled-cache" forces the test to always run).
function test_cmds {
  local folder_name
  if [ -n "${DOCS_WORKING_DIR:-}" ]; then
    folder_name="examples"
  else
    folder_name="contracts"
  fi
  # Test bb aztec_process command
  echo "$BB_HASH noir-projects/scripts/test_aztec_process.sh"
  # Fairies want to run these tests on every PR (hence "disabled-cache").
  if [ "${TARGET_BRANCH:-}" = "merge-train/fairies" ]; then
    i=0
    $NARGO test --list-tests --silence-warnings | sort | while read -r package test; do
      # Round-robin each test across the available TXE ports.
      port=$((14730 + (i++ % ${NUM_TXES:-1})))
      echo "disabled-cache noir-projects/scripts/run_test.sh noir-contracts $package $test $port"
    done
  else
    # Memoise get_contract_hash per package — it's expensive and many tests share a package.
    # NOTE(review): the while loop runs in a pipeline subshell; 'cache' and 'i' persist across
    # iterations within the loop but are discarded afterwards, which is all that's needed here.
    local -A cache
    i=0
    $NARGO test --list-tests --silence-warnings | sort | while read -r package test; do
      port=$((14730 + (i++ % ${NUM_TXES:-1})))
      [ -z "${cache[$package]:-}" ] && cache[$package]=$(get_contract_hash $package $folder_name)
      echo "${cache[$package]} noir-projects/scripts/run_test.sh noir-contracts $package $test $port"
    done
  fi
}
# Spin up local TXE servers, wait for them to accept connections, then run all contract tests.
function test {
  # Starting txe servers with incrementing port numbers.
  # Base port is below the Linux ephemeral range (32768-60999) to avoid conflicts.
  local txe_base_port=14730
  export NUM_TXES=1
  # Kill any background TXE servers when this function exits, on success or failure.
  trap 'kill $(jobs -p) &>/dev/null || true' EXIT
  for i in $(seq 0 $((NUM_TXES-1))); do
    check_port $((txe_base_port + i)) || echo "WARNING: port $((txe_base_port + i)) is in use, TXE $i may fail to start"
    # NOTE(review): $root is presumably set by the sourced ci3/source_bootstrap — confirm.
    (cd $root/yarn-project/txe && UV_THREADPOOL_SIZE=8 LOG_LEVEL=silent TXE_PORT=$((txe_base_port + i)) yarn start) >/dev/null &
  done
  echo "Waiting for TXE's to start..."
  # Poll each port (1s interval, 60s budget) until its TXE accepts TCP connections.
  for i in $(seq 0 $((NUM_TXES-1))); do
    local j=0
    local port=$((txe_base_port + i))
    while ! nc -z 127.0.0.1 $port &>/dev/null; do
      if [ $j == 60 ]; then
        echo "TXE $i failed to start on port $port after 60s." >&2
        check_port $port
        exit 1
      fi
      sleep 1
      j=$((j+1))
    done
  done
  # Generous timeout (ms) for nargo's foreign (oracle) calls into the TXE.
  export NARGO_FOREIGN_CALL_TIMEOUT=300000
  test_cmds | filter_test_cmds | parallelize
}
# Format all Noir source in the workspace via nargo's built-in formatter.
format() {
  $NARGO fmt
}
# Command dispatch. NOTE(review): $cmd is presumably set by the sourced ci3/source_bootstrap — confirm.
case "$cmd" in
  "clean-keys")
    # Strip verification keys from all built artifacts (forces vk regeneration on next build).
    for artifact in target/*.json; do
      # BUGFIX: without nullglob an empty target/ leaves the literal pattern 'target/*.json',
      # which jq would then fail to open. Skip non-existent matches.
      [ -e "$artifact" ] || continue
      echo_stderr "Scrubbing vk from $artifact..."
      jq '.functions |= map(del(.verification_key))' "$artifact" > "${artifact}.tmp"
      mv "${artifact}.tmp" "$artifact"
    done
    ;;
  "")
    # Default: build everything.
    build
    ;;
  "compile")
    # Build only the named contracts, verbosely by default.
    VERBOSE=${VERBOSE:-1} build "$@"
    ;;
  *)
    # Delegate common commands (hash, test, etc.) to the shared handler.
    default_cmd_handler "$@"
    ;;
esac