Commit 0ee6955c authored by José Henrique's avatar José Henrique
Browse files

Cleanup

parent c9603b9b
......@@ -15,7 +15,7 @@ def default_value(key, def_value):
build_env_vars = {}
for k in ["device", "version", "production", "eng_build",
"wip_branch", "generate_incremental", "repopick",
"wip_branch", "repopick",
"CI_RUNNER_DESCRIPTION", "CI_JOB_URL", "CI_PIPELINE_ID",
"build_repo_url", "build_repo_branch"]:
if os.getenv(k):
......@@ -30,7 +30,6 @@ if "version" not in build_env_vars:
default_value("production", "false")
default_value("eng_build", "false")
default_value("wip_branch", "false")
default_value("generate_incremental", build_env_vars["production"])
default_value("CI_RUNNER_DESCRIPTION", socket.gethostname())
manifest_url_main = "https://github.com/PixelExperience/manifest"
......@@ -47,7 +46,6 @@ elif build_env_vars["version"] == "eleven":
build_env_vars["manifest_url"] = manifest_url_main
build_env_vars["manifest_branch"] = "eleven"
build_env_vars["fallback_branch"] = "eleven"
build_env_vars["generate_incremental"] = "false"
else:
sys.exit("Invalid version")
......
......@@ -7,11 +7,6 @@ import re
from post_build_utils import *
files_to_upload = []
incremental_info = None
def is_incremental_and_target_files_enabled(post_build_env_vars):
    """Return True unless this build targets the "eleven" version.

    Incremental OTA generation and target-files handling are disabled
    for "eleven" builds.
    """
    version = post_build_env_vars["version"]
    return version != "eleven"
def sync_tools(post_build_env_vars):
......@@ -50,20 +45,10 @@ def create_release_zip(post_build_env_vars):
ota_from_target_files(
target_files_path,
post_build_env_vars["build_name"],
sign=post_build_env_vars["production"])
post_build_env_vars["production"])
files_to_upload.append(
post_build_env_vars["build_name"])
if is_incremental_and_target_files_enabled(post_build_env_vars) and (
post_build_env_vars["production"] or
post_build_env_vars["generate_incremental"]):
print("Compressing artifact using zstd...")
compress_target_files(
target_files_path,
post_build_env_vars["target_files_compressed_path"])
files_to_upload.append(
post_build_env_vars["target_files_compressed_path"])
def extract_recovery(post_build_env_vars):
if post_build_env_vars["aosp_recovery"]:
......@@ -87,86 +72,9 @@ def extract_recovery(post_build_env_vars):
post_build_env_vars["aosp_recovery_path"])
def generate_incremental(post_build_env_vars):
    """Build an incremental OTA package against the latest released build.

    Looks up the previous build for this device/version, downloads and
    verifies its zstd-compressed target-files artifact, decompresses it,
    and produces a delta OTA via ota_from_target_files(). On success the
    module-level `incremental_info` dict is populated and the new package
    is queued in `files_to_upload`. Does nothing when incrementals are
    disabled for this version/config or no usable previous build exists.
    """
    global incremental_info
    env = post_build_env_vars
    # Production builds diff against the signed target-files archive;
    # CI builds diff against the unsigned one.
    if env["production"]:
        target_files_path = env["target_files_path"]
    else:
        target_files_path = env["unsigned_target_files_path"]
    if not (is_incremental_and_target_files_enabled(env) and
            env["generate_incremental"]):
        return
    print("Incremental generation enabled, searching for"
          " latest released build...")
    previous_build_name = get_previous_build(
        env["device"], env["version"], env["production"])
    if previous_build_name == "":
        print("Incremental generation not required because there's"
              " no previous production or test build.")
        return
    print("Latest released build: " + previous_build_name +
          ", searching for artifact availability...")
    previous_target_files_hash = get_artifact_info(
        env["device"], previous_build_name)
    if not previous_target_files_hash:
        print("Incremental generation not required because there's"
              " no previous production or test build.")
        return
    archive_name = previous_build_name + ".zst"
    print("Downloading artifact...")
    download_artifact(env["device"], previous_build_name, archive_name)
    print("Checking file hash...")
    artifact_md5 = get_md5(archive_name)
    # NOTE(review): this integrity check is stripped under `python -O`;
    # it relies on assertions being enabled.
    assert artifact_md5 == previous_target_files_hash
    print("Decompressing artifact using zstd...")
    decompress_target_files(archive_name)
    print("Generating incremental...")
    # Build names embed a YYYYMMDD-HHMM timestamp; reuse it for the
    # incremental package's name.
    build_date = re.search(
        r'\d{4}\d{2}\d{2}-\d{2}\d{2}',
        env["build_name"]).group()
    if env["production"]:
        suffix = "-OFFICIAL.zip"
    else:
        suffix = "-CI.zip"
    incremental_build_name = (
        previous_build_name.replace(suffix, "") +
        "-update-" + build_date + suffix)
    ota_from_target_files(
        target_files_path,
        incremental_build_name,
        sign=env["production"],
        # Only non-A/B devices use file-based incrementals.
        file_based_incremental=(env["ab_update"] is False),
        previous_target_files_path=previous_build_name)
    incremental_info = {
        "name": incremental_build_name,
        "size": os.path.getsize(incremental_build_name),
        "md5_hash": get_md5(incremental_build_name),
        "base_name": previous_build_name
    }
    files_to_upload.append(incremental_build_name)
def send_to_release_cp(post_build_env_vars):
global incremental_info
print("Uploading build artifacts to releasecp...")
has_target_files = (
is_incremental_and_target_files_enabled(post_build_env_vars) and
(post_build_env_vars["production"] or
post_build_env_vars["generate_incremental"])
)
has_aosp_recovery = post_build_env_vars["aosp_recovery"]
json_str = json.dumps({
......@@ -180,13 +88,7 @@ def send_to_release_cp(post_build_env_vars):
"aosp_recovery_md5_hash": (
get_md5(post_build_env_vars["aosp_recovery_path"])
if has_aosp_recovery else
None),
"has_target_files": has_target_files,
"target_files_md5_hash": (
get_md5(post_build_env_vars["target_files_compressed_path"])
if has_target_files else
None),
"incremental": incremental_info
None)
})
json_path = post_build_env_vars["build_name"].replace(".zip", ".json")
......@@ -213,5 +115,4 @@ def start_post_build(post_build_env_vars):
sync_tools(post_build_env_vars)
create_release_zip(post_build_env_vars)
extract_recovery(post_build_env_vars)
generate_incremental(post_build_env_vars)
send_to_release_cp(post_build_env_vars)
......@@ -23,7 +23,6 @@ def parse_build_prop_file(build_prop_path):
def init_post_build_env_vars(build_env_vars):
result = build_env_vars
result["production"] = result["production"] == "true"
result["generate_incremental"] = result["generate_incremental"] == "true"
if not os.getenv("DEBUG") and os.path.exists(result["post_build_dir"]):
shutil.rmtree(result["post_build_dir"])
......@@ -67,8 +66,6 @@ def init_post_build_env_vars(build_env_vars):
raise Exception("Unable to find file: " + unsigned_target_files_path_glob)
result["target_files_path"] = (
result["build_name"].replace(".zip", "_target_files.zip"))
result["target_files_compressed_path"] = (
result["target_files_path"] + ".zst")
# Repos
build_repo_url = ("https://github.com/PixelExperience/build"
......
......@@ -46,18 +46,12 @@ def sign_target_files_apks(unsigned_target_files_path, dest_target_files_path,
def ota_from_target_files(target_files_path,
dest_file_path,
sign=False,
file_based_incremental=False,
previous_target_files_path=False):
sign=False):
release_key_path = "signing_keys/default/releasekey"
cmd = "python2 build/make/tools/releasetools/ota_from_target_files"
cmd += " -p out/host/linux-x86"
if file_based_incremental:
cmd += " --file"
if sign:
cmd += " -k " + release_key_path
if previous_target_files_path:
cmd += " -i " + previous_target_files_path
cmd += " " + target_files_path
cmd += " " + dest_file_path
result = run_command(cmd)
......@@ -67,24 +61,6 @@ def ota_from_target_files(target_files_path,
str(result))
def compress_target_files(target_files_path, dest_file_path):
    """Compress target_files_path into dest_file_path with zstd level 3.

    Raises:
        Exception: if the zstd process exits with a non-zero code.
    """
    command = ("zstd -f -3 " + target_files_path +
               " -o " + dest_file_path)
    exit_code = run_command(command)
    if exit_code == 0:
        return
    raise Exception(
        "Failed to run zstd, process returned code " +
        str(exit_code))
def decompress_target_files(target_files_path):
    """Decompress a zstd archive in place, deleting the .zst afterwards.

    Raises:
        Exception: if the zstd process exits with a non-zero code.
    """
    # --rm removes the compressed input once decompression succeeds.
    exit_code = run_command("zstd -d -f --rm " + target_files_path)
    if exit_code != 0:
        raise Exception(
            "Failed to run zstd, process returned code " +
            str(exit_code))
def sync_repo(url, path, branch):
print("Syncing repo " + url + " (" + branch + " branch) to path " + path)
git.Repo.clone_from(
......@@ -134,65 +110,6 @@ def upload_to_ftp(host, username, password, parent_dir, files):
ftp.storbinary("STOR " + file_name, file)
def get_previous_build(device, version, production):
    """Return the filename of the latest released build for device/version.

    Queries the production OTA endpoint when `production` is truthy,
    otherwise the CI endpoint.

    Raises:
        Exception: if the OTA API does not answer with HTTP 200.
    """
    if production:
        api_url = "https://download.pixelexperience.org/ota_v4"
    else:
        api_url = "https://download.pixelexperience.org/ota_ci"
    response = requests.request(
        "GET", api_url + "/" + device + "/" + version)
    # Bug fix: the original validated the status with `assert`, which is
    # silently stripped under `python -O`; fail loudly instead.
    if response.status_code != 200:
        raise Exception(
            "Failed to fetch previous build info, server returned code " +
            str(response.status_code))
    return response.json()["filename"]
def get_artifact_info(device, build):
    """Query the artifacts server for a build's target-files artifact.

    Returns the artifact's md5 hash string when it is available, or False
    when the server reports it missing or errored. Retries up to 5 times,
    60 seconds apart, on non-200 responses; other request failures
    (connection errors, bad JSON) propagate to the caller.

    Raises:
        Exception: when all 5 attempts get a non-200 response.
    """
    api_url = "https://artifacts.pixelexperience.org/getInfo"
    post_data = {
        "api_key": os.getenv("GET_API_PRIV_KEY"),
        "device": device,
        "build": build
    }
    for attempt in range(1, 6):
        print("Fetching artifact info (attempt " + str(attempt) + " of 5)")
        try:
            response = requests.request("POST", api_url, data=post_data)
            status_code = response.status_code
            # The assert deliberately routes non-200 responses into the
            # retry path below via AssertionError.
            assert status_code == 200
            payload = response.json()
            if not payload["error"] and payload["available"]:
                return payload["md5_hash"]
            # Server answered but the artifact is unavailable: give up.
            break
        except AssertionError:
            print("Failed, server returned: " +
                  str(status_code) + ", trying again in 60 seconds...")
            if attempt == 5:
                raise Exception("Failed to get artifact info")
            time.sleep(60)
    return False
def download_artifact(device, build, destination_path):
    """Download a build's compressed target-files artifact via curl.

    Retries up to 5 times, 60 seconds apart. Returns True on success.

    Raises:
        Exception: when all 5 attempts fail, with the last curl exit code.
    """
    # NOTE(review): the command is assembled as a shell string; device and
    # build are expected to be shell-safe identifiers — confirm upstream.
    cmd = "curl --location --request POST" + \
        " 'https://artifacts.pixelexperience.org/get'" + \
        " --header 'Content-Type: application/x-www-form-urlencoded'" + \
        " --data-urlencode 'api_key=" + os.getenv("GET_API_PRIV_KEY") + "'" + \
        " --data-urlencode 'device=" + device + "'" + \
        " --data-urlencode 'build=" + build + "'" + \
        " --output '" + destination_path + "'"
    result = 1  # last curl exit code, reported if every attempt fails
    for i in range(1, 6):
        print("Trying to download artifact (attempt " + str(i) + " of 5)")
        # Bug fix: the original discarded run_command's return value, so the
        # final raise referenced an undefined name `result` (NameError).
        result = run_command(cmd)
        if result == 0:
            return True
        print("Download failed, trying again in 60 seconds...")
        time.sleep(60)
    raise Exception(
        "Failed to download artifact, process returned code " +
        str(result))
def validate_release_tools(release_tools_path):
allowed_imports = ["hashlib", "common", "re", "struct"]
prohibited_keywords = ["eval", "exec", "__import__", "open"]
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment