Commit c313adb6 authored by José Henrique

Implement incrementals

parent 8c4440b1
@@ -2,10 +2,12 @@
import os
import sys
import json
import re
from post_build_utils import *
files_to_upload = []
incremental_info = None
def sync_tools(post_build_env_vars):
@@ -37,7 +39,7 @@ def create_release_zip(post_build_env_vars):
if (post_build_env_vars["production"]
or post_build_env_vars["generate_incremental"]):
print("Compressing target files using zstd...")
print("Compressing artifact using zstd...")
compress_target_files(
target_files_path,
post_build_env_vars["target_files_compressed_path"])
@@ -67,7 +69,78 @@ def extract_recovery(post_build_env_vars):
post_build_env_vars["aosp_recovery_path"])
def generate_incremental(post_build_env_vars):
global incremental_info
target_files_path = (
post_build_env_vars["target_files_path"]
if post_build_env_vars["production"] else
post_build_env_vars["unsigned_target_files_path"])
if post_build_env_vars["generate_incremental"]:
print("Incremental generation enabled, searching for" +
" latest released build...")
previous_build_name = get_previous_build(
post_build_env_vars["device"],
post_build_env_vars["version"],
post_build_env_vars["production"])
if previous_build_name == "":
print("Incremental generation not required because there's" +
" no previous production or test build.")
else:
print("Latest released build: " + previous_build_name +
", searching for artifact availability...")
previous_target_files_hash = get_artifact_info(
post_build_env_vars["device"], previous_build_name)
if not previous_target_files_hash:
print(
    "Incremental generation not possible because the previous" +
    " build's artifact is not available.")
else:
print("Downloading artifact...")
download_artifact(
post_build_env_vars["device"],
previous_build_name,
previous_build_name + ".zst")
print("Checking file hash...")
artifact_md5 = get_md5(
previous_build_name + ".zst")
assert artifact_md5 == previous_target_files_hash
print("Decompressing artifact using zstd...")
decompress_target_files(previous_build_name + ".zst")
print("Generating incremental...")
build_date = re.search(
r'\d{4}\d{2}\d{2}-\d{2}\d{2}',
post_build_env_vars["build_name"]).group()
if post_build_env_vars["production"]:
incremental_build_name = (
previous_build_name.replace("-OFFICIAL.zip", "") +
"-update-" + build_date + "-OFFICIAL.zip")
else:
incremental_build_name = (
previous_build_name.replace("-CI.zip", "") +
"-update-" + build_date + "-CI.zip")
ota_from_target_files(
target_files_path,
incremental_build_name,
sign=post_build_env_vars["production"],
file_based_incremental=(
post_build_env_vars["ab_update"] is False),
previous_target_files_path=previous_build_name)
incremental_info = {
"name": incremental_build_name,
"size": os.path.getsize(incremental_build_name),
"md5_hash": get_md5(incremental_build_name),
"base_name": previous_build_name
}
files_to_upload.append(incremental_build_name)
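To make the naming scheme above concrete, here is a minimal sketch using hypothetical file names (the regex and string handling mirror generate_incremental; only the names are invented):

import re

# Hypothetical names, for illustration only.
previous_build_name = "PixelExperience_device-13.0-20230101-0300-OFFICIAL.zip"
build_name = "PixelExperience_device-13.0-20230215-0412-OFFICIAL.zip"

# Same pattern as above: extract the YYYYMMDD-HHMM stamp from the new build name.
build_date = re.search(r'\d{4}\d{2}\d{2}-\d{2}\d{2}', build_name).group()

incremental_build_name = (
    previous_build_name.replace("-OFFICIAL.zip", "") +
    "-update-" + build_date + "-OFFICIAL.zip")
# -> PixelExperience_device-13.0-20230101-0300-update-20230215-0412-OFFICIAL.zip

In other words, the incremental package is named after the base (previous) build and carries the new build's timestamp, with -CI in place of -OFFICIAL for non-production builds.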
def send_to_release_cp(post_build_env_vars):
global incremental_info
print("Uploading build artifacts to releasecp...")
has_target_files = (post_build_env_vars["production"] or
@@ -90,7 +163,8 @@ def send_to_release_cp(post_build_env_vars):
"target_files_md5_hash": (
get_md5(post_build_env_vars["target_files_compressed_path"])
if has_target_files else
None)
None),
"incremental": incremental_info
})
json_path = post_build_env_vars["build_name"].replace(".zip", ".json")
@@ -116,4 +190,5 @@ def start_post_build(post_build_env_vars):
sync_tools(post_build_env_vars)
create_release_zip(post_build_env_vars)
extract_recovery(post_build_env_vars)
generate_incremental(post_build_env_vars)
send_to_release_cp(post_build_env_vars)
@@ -35,7 +35,8 @@ def init_post_build_env_vars(build_env_vars):
os.chdir("post_build")
unsigned_target_files_path_glob = result["working_dir"] + \
"/out/target/product/*/obj/PACKAGING/target_files_intermediates/*-target_files-*.zip"
"/out/target/product/*/obj/PACKAGING/" + \
"target_files_intermediates/*-target_files-*.zip"
result["unsigned_target_files_path"] = None
@@ -5,6 +5,7 @@ import git
import traceback
import hashlib
import ftplib
import requests
from zipfile import ZipFile
@@ -27,12 +28,19 @@ def sign_target_files_apks(unsigned_target_files_path, dest_target_files_path):
str(result))
def ota_from_target_files(target_files_path, dest_file_path,
sign=False):
def ota_from_target_files(target_files_path,
dest_file_path,
sign=False,
file_based_incremental=False,
previous_target_files_path=False):
release_key_path = "signing_keys/default/releasekey"
cmd = "python2 build/make/tools/releasetools/ota_from_target_files"
if file_based_incremental:
cmd += " --file"
if sign:
cmd += " -k" + release_key_path
cmd += " -k " + release_key_path
if previous_target_files_path:
cmd += " -i " + previous_target_files_path
cmd += " " + target_files_path
cmd += " " + dest_file_path
result = run_command(cmd)
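As a rough illustration of the command assembled above, a signed, file-based incremental build might be requested like this (paths and the output name are hypothetical; whether the underlying releasetools script accepts --file depends on this tree's copy of it):

# Hypothetical call for a signed, file-based incremental on a non-A/B device.
ota_from_target_files(
    "target_files.zip",
    "device-update-20230215-0412-OFFICIAL.zip",
    sign=True,
    file_based_incremental=True,
    previous_target_files_path="previous_target_files.zip")
# Command string built above (wrapped here for readability):
# python2 build/make/tools/releasetools/ota_from_target_files --file \
#   -k signing_keys/default/releasekey -i previous_target_files.zip \
#   target_files.zip device-update-20230215-0412-OFFICIAL.zip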
@@ -43,7 +51,16 @@ def ota_from_target_files(target_files_path, dest_file_path,
def compress_target_files(target_files_path, dest_file_path):
cmd = "zstd -3 " + target_files_path + " -o " + dest_file_path
cmd = "zstd -f -3 " + target_files_path + " -o " + dest_file_path
result = run_command(cmd)
if result != 0:
sys.exit(
"Failed to run zstd, process returned code " +
str(result))
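For reference, the zstd invocations built by this helper and the decompression helper that follows expand to roughly the lines below (the path is illustrative; per standard zstd usage, -f overwrites existing output, -3 picks the compression level, -d decompresses, and --rm deletes the .zst input after success):

# Illustrative commands assembled by compress_target_files / decompress_target_files:
#   zstd -f -3 target_files.zip -o target_files.zip.zst
#   zstd -d -f --rm target_files.zip.zst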
def decompress_target_files(target_files_path):
cmd = "zstd -d -f --rm " + target_files_path
result = run_command(cmd)
if result != 0:
sys.exit(
@@ -90,3 +107,45 @@ def upload_to_ftp(host, username, password, parent_dir, files):
with open(file_name, "rb") as file:
print("Uploading " + file_name)
ftp.storbinary("STOR " + file_name, file)
def get_previous_build(device, version, production):
api_url = ("https://download.pixelexperience.org/ota_v4"
if production else
"https://download.pixelexperience.org/ota_ci")
response = requests.request(
"GET", api_url + "/" + device + "/" + version)
assert response.status_code == 200
response = response.json()
return response["filename"]
def get_artifact_info(device, build):
api_url = "https://artifacts.pixelexperience.org/getInfo"
post_data = {
"api_key": os.getenv("GET_API_PRIV_KEY"),
"device": device,
"build": build
}
response = requests.request("POST", api_url, data=post_data)
assert response.status_code == 200
response = response.json()
if not response["error"] and response["available"]:
return response["md5_hash"]
return False
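For context, the caller in generate_incremental treats the return value as "MD5 of the previous build's compressed target files, or False if the artifact cannot be used"; the keys checked above suggest a response along these lines (values are illustrative only):

# Inferred response shape:
#   {"error": false, "available": true, "md5_hash": "d41d8cd98f00b204e9800998ecf8427e"}
previous_target_files_hash = get_artifact_info(
    "device", "PixelExperience_device-13.0-20230101-0300-OFFICIAL.zip")
if previous_target_files_hash:
    # Artifact is available; the MD5 is later compared against the downloaded file.
    pass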
def download_artifact(device, build, destination_path):
cmd = "curl --location --request POST" + \
" 'https://artifacts.pixelexperience.org/get'" + \
" --header 'Content-Type: application/x-www-form-urlencoded'" + \
" --data-urlencode 'api_key=" + os.getenv("GET_API_PRIV_KEY") + "'" + \
" --data-urlencode 'device=" + device + "'" + \
" --data-urlencode 'build=" + build + "'" + \
" --output '" + destination_path + "'"
result = run_command(cmd)
if result != 0:
sys.exit(
"Failed to download artifact, process returned code " +
str(result))
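Since post_build_utils already imports requests, the curl invocation above is roughly equivalent to the following sketch (not part of the commit; it only spells out the form-encoded POST being performed, streaming the response to disk):

import os
import requests

def download_artifact_via_requests(device, build, destination_path):
    # Hypothetical alternative to the curl-based helper above; same endpoint and fields.
    response = requests.post(
        "https://artifacts.pixelexperience.org/get",
        data={
            "api_key": os.getenv("GET_API_PRIV_KEY"),
            "device": device,
            "build": build,
        },
        stream=True)
    response.raise_for_status()
    with open(destination_path, "wb") as output:
        for chunk in response.iter_content(chunk_size=1024 * 1024):
            output.write(chunk)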