-rw-r--r--  aosp-merger/README.md  33
-rwxr-xr-x  aosp-merger/aosp-merger.sh  124
-rwxr-xr-x  aosp-merger/branches_rebase.sh  68
-rwxr-xr-x  aosp-merger/branches_restore.sh  48
-rwxr-xr-x  aosp-merger/branches_save.sh  35
-rw-r--r--  best-caf-kernel/best-caf-kernel.py  115
-rwxr-xr-x  build-webview/build-webview.sh  146
-rw-r--r--  config-fs-gen/README.md  164
-rwxr-xr-x  config-fs-gen/config-fs-gen.py  137
-rw-r--r--  device-deps-regenerator/.gitignore  2
-rw-r--r--  device-deps-regenerator/README.md  6
-rw-r--r--  device-deps-regenerator/app.py  130
-rw-r--r--  device-deps-regenerator/device2kernel.py  47
-rw-r--r--  device-deps-regenerator/devices.py  36
-rw-r--r--  device-deps-regenerator/requirements.txt  1
-rwxr-xr-x  emoji-updater/emoji-updater.py  30
-rwxr-xr-x  key-migration/export-keys.sh  14
-rw-r--r--  key-migration/migration.sh  82
-rw-r--r--  lineage-push/README.md  37
-rwxr-xr-x  lineage-push/lineage-push.py  127
-rw-r--r--  maintainer-checks/README.md  2
-rwxr-xr-x  maintainer-checks/maintainers.py  182
-rw-r--r--  maintainer-checks/requirements.txt  1
-rwxr-xr-x  update-payload-extractor/extract.py (renamed from extract.py)  0
-rw-r--r--  update-payload-extractor/update_payload/__init__.py (renamed from update_payload/__init__.py)  0
-rw-r--r--  update-payload-extractor/update_payload/applier.py (renamed from update_payload/applier.py)  0
-rw-r--r--  update-payload-extractor/update_payload/checker.py (renamed from update_payload/checker.py)  0
-rwxr-xr-x  update-payload-extractor/update_payload/checker_unittest.py (renamed from update_payload/checker_unittest.py)  0
-rw-r--r--  update-payload-extractor/update_payload/common.py (renamed from update_payload/common.py)  0
-rw-r--r--  update-payload-extractor/update_payload/error.py (renamed from update_payload/error.py)  0
-rw-r--r--  update-payload-extractor/update_payload/format_utils.py (renamed from update_payload/format_utils.py)  0
-rwxr-xr-x  update-payload-extractor/update_payload/format_utils_unittest.py (renamed from update_payload/format_utils_unittest.py)  0
-rw-r--r--  update-payload-extractor/update_payload/histogram.py (renamed from update_payload/histogram.py)  0
-rwxr-xr-x  update-payload-extractor/update_payload/histogram_unittest.py (renamed from update_payload/histogram_unittest.py)  0
-rw-r--r--  update-payload-extractor/update_payload/payload-test-key.pem (renamed from update_payload/payload-test-key.pem)  0
-rw-r--r--  update-payload-extractor/update_payload/payload-test-key.pub (renamed from update_payload/payload-test-key.pub)  0
-rw-r--r--  update-payload-extractor/update_payload/payload.py (renamed from update_payload/payload.py)  0
-rw-r--r--  update-payload-extractor/update_payload/test_utils.py (renamed from update_payload/test_utils.py)  0
-rw-r--r--  update-payload-extractor/update_payload/update-payload-key.pub.pem (renamed from update_payload/update-payload-key.pub.pem)  0
-rw-r--r--  update-payload-extractor/update_payload/update_metadata_pb2.py (renamed from update_payload/update_metadata_pb2.py)  0
40 files changed, 1567 insertions, 0 deletions
diff --git a/aosp-merger/README.md b/aosp-merger/README.md
new file mode 100644
index 0000000..2470cd5
--- /dev/null
+++ b/aosp-merger/README.md
@@ -0,0 +1,33 @@
+# Rough workflow
+
+1. Snapshot the names of your current working branches to the `branches.list` file:
+
+ ./lineage/scripts/aosp-merger/branches_save.sh
+
+2. Note the current AOSP tag in `.repo/manifests/default.xml`, update it to the desired new tag, and then commit the change locally (the aosp-merger script checks for uncommitted changes in the `.repo/manifests` git repo).
+3. Create a staging branch and merge in the new AOSP tag:
+
+ ./lineage/scripts/aosp-merger/aosp-merger.sh merge \<oldaosptag> \<newaosptag>
+ (where oldaosptag is the original AOSP tag that was in `.repo/manifests/default.xml`)
+ * Example invocation:
+
+ ./lineage/scripts/aosp-merger/aosp-merger.sh merge android-8.0.0_r3 android-8.0.0_r30
+
+4. Every project in your tree should now be one of:
+ * \<newaosptag> if the project was tracking AOSP
+ * a staging branch if the project was a LineageOS fork from AOSP (check `merged_repos.txt` for status and whether there are conflicts to resolve)
+ * the default repo lineage branch for `.repo/manifests/snippets.xml` projects
+5. Rebase your local working branches onto the staging branch (this creates new `<branch>-rebase` branches rather than touching the originals):
+
+ ./lineage/scripts/aosp-merger/branches_rebase.sh \<nameofstagingbranch>
+ * Example invocation:
+
+ ./lineage/scripts/aosp-merger/branches_rebase.sh staging/lineage-15.0_merge-android-8.0.0_r30
+6. Build, install, boot, verify, etc.
+
+# TODO
+
+* Make it work for rebase (I'm sure it'll need fixups).
+* Create squashed gerrits for each merge.
+* Abandon squashed gerrits and push each merge automatically.
+* DONE. Instead of merging the staging branch into your local branch (if you have one), create a new branch for the local+staging merge.
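Step 2 of the workflow above is described only in prose. A minimal sketch of what it can look like, assuming the old tag string appears literally in `default.xml` (manifest layouts differ, so treat the `sed` pattern and commit message as placeholders):

```
cd .repo/manifests
# Hypothetical example: bump the pinned AOSP tag and commit the change locally
sed -i 's/android-8.0.0_r3/android-8.0.0_r30/g' default.xml
git commit -am "manifests: Track android-8.0.0_r30"
cd -
```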
diff --git a/aosp-merger/aosp-merger.sh b/aosp-merger/aosp-merger.sh
new file mode 100755
index 0000000..1542ae7
--- /dev/null
+++ b/aosp-merger/aosp-merger.sh
@@ -0,0 +1,124 @@
+#!/bin/bash
+#
+# Copyright (C) 2017 The LineageOS Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+usage() {
+ echo "Usage ${0} <merge|rebase> <oldaosptag> <newaosptag>"
+}
+
+# Verify argument count
+if [ "$#" -ne 3 ]; then
+ usage
+ exit 1
+fi
+
+OPERATION="${1}"
+OLDTAG="${2}"
+NEWTAG="${3}"
+
+if [ "${OPERATION}" != "merge" -a "${OPERATION}" != "rebase" ]; then
+ usage
+ exit 1
+fi
+
+# Check to make sure this is being run from the top level repo dir
+if [ ! -e "build/envsetup.sh" ]; then
+ echo "Must be run from the top level repo dir"
+ exit 1
+fi
+
+# Source build environment (needed for aospremote)
+. build/envsetup.sh
+
+TOP="${ANDROID_BUILD_TOP}"
+MERGEDREPOS="${TOP}/merged_repos.txt"
+MANIFEST="${TOP}/.repo/manifest.xml"
+BRANCH=$(grep "default revision" "${MANIFEST}" \
+ | sed 's/^ *//g;s/<default revision=\"refs\/heads\///g;s/\"//g')
+STAGINGBRANCH="staging/${BRANCH}_${OPERATION}-${NEWTAG}"
+
+# Build list of LineageOS forked repos
+PROJECTPATHS=$(grep "name=\"LineageOS/" "${MANIFEST}" | sed -n 's/.*path="\([^"]\+\)".*/\1/p')
+
+echo "#### Old tag = ${OLDTAG} Branch = ${BRANCH} Staging branch = ${STAGINGBRANCH} ####"
+
+# Make sure manifest and forked repos are in a consistent state
+echo "#### Verifying there are no uncommitted changes on LineageOS forked AOSP projects ####"
+for PROJECTPATH in ${PROJECTPATHS} .repo/manifests; do
+ cd "${TOP}/${PROJECTPATH}"
+ if [[ -n "$(git status --porcelain)" ]]; then
+ echo "Path ${PROJECTPATH} has uncommitted changes. Please fix."
+ exit 1
+ fi
+done
+echo "#### Verification complete - no uncommitted changes found ####"
+
+# Remove any existing list of merged repos file
+rm -f "${MERGEDREPOS}"
+
+# Sync and detach from current branches
+repo sync -d
+
+# Ditch any existing staging branches (across all projects)
+repo abandon "${STAGINGBRANCH}"
+
+# Iterate over each forked project
+for PROJECTPATH in ${PROJECTPATHS}; do
+ cd "${TOP}/${PROJECTPATH}"
+ repo start "${STAGINGBRANCH}" .
+ aospremote | grep -v "Remote 'aosp' created"
+ git fetch -q --tags aosp "${NEWTAG}"
+
+ PROJECTOPERATION="${OPERATION}"
+
+ # Check if we've actually changed anything before attempting to merge
+ # If we haven't, just "git reset --hard" to the tag
+ if [[ -z "$(git diff HEAD ${OLDTAG})" ]]; then
+ git reset --hard "${NEWTAG}"
+ echo -e "reset\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}"
+ continue
+ fi
+
+ # Was there any change upstream? Skip if not.
+ if [[ -z "$(git diff ${OLDTAG} ${NEWTAG})" ]]; then
+ echo -e "nochange\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}"
+ continue
+ fi
+
+ # Determine whether OLDTAG is an ancestor of NEWTAG
+ # ie is history consistent.
+ git merge-base --is-ancestor "${OLDTAG}" "${NEWTAG}"
+ # If no, force rebase.
+ if [[ "$?" -eq 1 ]]; then
+ echo -n "#### Project ${PROJECTPATH} old tag ${OLD} is not an ancestor "
+ echo "of new tag ${NEWTAG}, forcing rebase ####"
+ PROJECTOPERATION="rebase"
+ fi
+
+ if [[ "${PROJECTOPERATION}" == "merge" ]]; then
+ echo "#### Merging ${NEWTAG} into ${PROJECTPATH} ####"
+ git merge --no-edit --log "${NEWTAG}"
+ elif [[ "${PROJECTOPERATION}" == "rebase" ]]; then
+ echo "#### Rebasing ${PROJECTPATH} onto ${NEWTAG} ####"
+ git rebase --onto "${NEWTAG}" "${OLDTAG}"
+ fi
+
+ CONFLICT=""
+ if [[ -n "$(git status --porcelain)" ]]; then
+ CONFLICT="conflict-"
+ fi
+ echo -e "${CONFLICT}${PROJECTOPERATION}\t\t${PROJECTPATH}" | tee -a "${MERGEDREPOS}"
+done
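The `tee -a` calls above record one status line per project in `merged_repos.txt` (`reset`, `nochange`, `merge`, `rebase`, or a `conflict-`-prefixed variant), so a quick way to find the projects that still need manual attention after a run is:

```
# List projects whose merge/rebase left conflicts behind
grep '^conflict-' merged_repos.txt
```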
diff --git a/aosp-merger/branches_rebase.sh b/aosp-merger/branches_rebase.sh
new file mode 100755
index 0000000..1c9ec12
--- /dev/null
+++ b/aosp-merger/branches_rebase.sh
@@ -0,0 +1,68 @@
+#!/bin/bash
+#
+# Copyright (C) 2017 The LineageOS Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#####
+# Rebase your local working branches onto a new "upstream" branch.
+# Local branch list is defined in branches.list
+# (and can be created with branches_save.sh)
+# If the upstream branch doesn't exist (eg perhaps in lineage-sdk),
+# simply switch the working branch instead.
+
+if [ ! -e "build/envsetup.sh" ]; then
+ echo "Must run from root of repo"
+ exit 1
+fi
+
+if [ "$#" -ne 1 ]; then
+ echo "Usage ${0} <branch to rebase on top of>"
+ exit 1
+fi
+REBASEONTO="${1}"
+
+TOP="${PWD}"
+BRANCHLIST="${TOP}/branches.list"
+
+cat "${BRANCHLIST}" | while read l; do
+ set ${l}
+ PROJECTPATH="${1}"
+ BRANCH="${2}"
+ NEWBRANCH="${2}-rebase"
+ cd "${TOP}/${PROJECTPATH}"
+
+ # Sanity check
+ [[ -n "$(git status --porcelain)" ]]; then
+ echo -n "!!!! Project ${PROJECTPATH} has uncommitted files, "
+ echo "not switching to branch ${BRANCH} (skipping) !!!!"
+ continue
+ fi
+
+ # Check the $REBASEONTO branch actually exists
+ git show-ref "refs/heads/${REBASEONTO}" >/dev/null
+ if [ "$?" -ne 0 ]; then
+ # Nope
+ echo -n "#### Project ${PROJECTPATH} branch ${REBASEONTO} does not exist, "
+ echo "switching to ${BRANCH} instead ####"
+ git checkout "${BRANCH}"
+ else
+ echo "#### Creating ${PROJECTPATH} branch ${NEWBRANCH} from ${BRANCH} ####"
+ repo abandon "${NEWBRANCH}" .
+ repo start "${NEWBRANCH}" .
+ git reset --hard "${BRANCH}"
+ echo -n "#### Project ${PROJECTPATH} Rebasing branch ${NEWBRANCH} "
+ echo "on top of ${REBASEONTO} ####"
+ git rebase --onto "${REBASEONTO}"
+ fi
+done
diff --git a/aosp-merger/branches_restore.sh b/aosp-merger/branches_restore.sh
new file mode 100755
index 0000000..c10d626
--- /dev/null
+++ b/aosp-merger/branches_restore.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+#
+# Copyright (C) 2017 The LineageOS Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ ! -e "build/envsetup.sh" ]; then
+ echo "Must run from root of repo"
+ exit 1
+fi
+
+TOP="${PWD}"
+BRANCHLIST="${TOP}/branches.list"
+
+cat "${BRANCHLIST}" | while read l; do
+ set ${l}
+ PROJECTPATH="${1}"
+ BRANCH="${2}"
+ cd "${TOP}/${PROJECTPATH}"
+
+ # Check if we're on this branch already
+ CURBRANCH=$(git status -b --porcelain | head -1 | awk '{print $2}' | sed 's/\.\.\..*//')
+ if [ "${CURBRANCH}" == "${BRANCH}" ]; then
+ echo "#### Project ${PROJECTPATH} is already on branch ${BRANCH} ####"
+ continue
+ fi
+
+ # Sanity check
+ if [[ -n "$(git status --porcelain)" ]]; then
+ echo -n "#!#! Project ${PROJECTPATH} has uncommitted files, "
+ echo "not switching to branch ${BRANCH} #!#!"
+ exit 1
+ fi
+
+ echo "#### Project ${PROJECTPATH} Switching to branch ${BRANCH} ####"
+ git checkout "${BRANCH}"
+done
diff --git a/aosp-merger/branches_save.sh b/aosp-merger/branches_save.sh
new file mode 100755
index 0000000..a806430
--- /dev/null
+++ b/aosp-merger/branches_save.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+#
+# Copyright (C) 2017 The LineageOS Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+if [ ! -e "build/envsetup.sh" ]; then
+ echo "Must run from root of repo"
+ exit 1
+fi
+
+TOP="${PWD}"
+BRANCHLIST="${TOP}/branches.list"
+
+# Example repo status output:
+#project build/make/ branch x
+#project device/huawei/angler/ branch x
+
+repo status | grep '^project ' | while read l; do
+ set ${l}
+ PROJECTPATH=$(echo ${2} | sed 's|/$||')
+ BRANCH="${4}"
+ echo "${PROJECTPATH} ${BRANCH}"
+done | sort > "${BRANCHLIST}"
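Given the `repo status` output shown in the comment above, the script writes one `<project path> <branch>` pair per line, sorted; the result would look something like this (the paths and the branch name `x` are just the illustrative values from that comment):

```
$ ./lineage/scripts/aosp-merger/branches_save.sh
$ cat branches.list
build/make x
device/huawei/angler x
```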
diff --git a/best-caf-kernel/best-caf-kernel.py b/best-caf-kernel/best-caf-kernel.py
new file mode 100644
index 0000000..540aac1
--- /dev/null
+++ b/best-caf-kernel/best-caf-kernel.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import sys
+import time
+from multiprocessing import Event, Pool, Process, Queue
+from subprocess import PIPE, Popen
+
+try:
+ from Queue import Empty as Queue_Empty
+except ImportError:
+ from queue import Empty as Queue_Empty
+
+
+def run_subprocess(cmd):
+ sp = Popen(cmd, stdout=PIPE, stderr=PIPE,
+ shell=True, universal_newlines=True)
+ comm = sp.communicate()
+ exit_code = sp.returncode
+ if exit_code != 0:
+ print("There was an error running the subprocess.\n"
+ "cmd: %s\n"
+ "exit code: %d\n"
+ "stdout: %s\n"
+ "stderr: %s" % (cmd, exit_code, comm[0], comm[1]))
+ return comm
+
+
+def get_tags(tag_name):
+ cmd = "git tag -l %s" % tag_name
+ comm = run_subprocess(cmd)
+ return comm[0].strip("\n").split("\n")
+
+
+def get_total_changes(tag_name):
+ cmd = "git diff %s --shortstat" % tag_name
+ comm = run_subprocess(cmd)
+ try:
+ a, d = comm[0].split(",")[1:]
+ a = int(a.strip().split()[0])
+ d = int(d.strip().split()[0])
+ except ValueError:
+ total = None
+ else:
+ total = a + d
+ return total
+
+
+def worker(tag_name):
+ tc = get_total_changes(tag_name)
+ worker.q.put((tag_name, tc))
+
+
+def worker_init(q):
+ worker.q = q
+
+
+def background(q, e, s):
+ best = 9999999999999
+ tag = ""
+ while True:
+ try:
+ tn, tc = q.get(False)
+ except Queue_Empty:
+ if e.is_set():
+ break
+        else:
+            if tc is None:
+                # Diff stats could not be parsed for this tag; skip it
+                continue
+            if not s:
+                print("%s has %d lines changed" % (tn, tc))
+ if best > tc:
+ best = tc
+ tag = tn
+ if not s:
+ print("%s is the new best match with %d lines changed" % (tn, tc))
+ print("Best match")
+ print("TAG: %s" % tag)
+ print("Lines changed: %d" % best)
+
+
+def main():
+ import argparse # Only needed for main()
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-j", action="store", dest="jobs", default=1, type=int,
+ metavar="N", help="number of jobs to run at once")
+ parser.add_argument("-s", action="store_true", dest="silent", default=False,
+ help="reduce the verbosity of the output")
+ parser.add_argument("tag_name", metavar="<Tag Name>",
+ help="tag name to search for (can contain wildcards)")
+ args = parser.parse_args()
+
+ tags = get_tags(args.tag_name)
+ if not tags:
+ print("No tags to check. bailing.")
+ sys.exit(1)
+ if not args.silent:
+ print("number of tags to check: %d" % len(tags))
+
+ queue = Queue()
+ event = Event()
+
+ b = Process(target=background, args=(queue, event, args.silent))
+ b.start()
+
+ pool = Pool(args.jobs, worker_init, [queue])
+ pool.map(worker, tags)
+
+ pool.close()
+ pool.join()
+ event.set()
+ b.join()
+
+
+if __name__ == '__main__':
+ main()
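The script expects to be run from inside a kernel git tree that already has the CAF tags fetched; `tag_name` is passed straight to `git tag -l`, so shell-style wildcards work. A typical invocation might look like this (the tag pattern is only an example):

```
# Compare the working tree against every matching CAF tag, 8 at a time,
# and report the tag with the fewest changed lines
python best-caf-kernel.py -j 8 "LA.UM.7.1.r1*"
```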
diff --git a/build-webview/build-webview.sh b/build-webview/build-webview.sh
new file mode 100755
index 0000000..a8d4061
--- /dev/null
+++ b/build-webview/build-webview.sh
@@ -0,0 +1,146 @@
+#!/bin/bash
+
+set -e
+
+chromium_version="80.0.3987.132"
+chromium_code="3987132"
+clean=0
+gsync=0
+supported_archs=(arm arm64 x86 x64)
+
+usage() {
+ echo "Usage:"
+ echo " build_webview [ options ]"
+ echo
+ echo " Options:"
+ echo " -a <arch> Build specified arch"
+ echo " -c Clean"
+ echo " -h Show this message"
+ echo " -r <release> Specify chromium release"
+ echo " -s Sync"
+ echo
+ echo " Example:"
+ echo " build_webview -c -r $chromium_version:$chromium_code"
+ echo
+ exit 1
+}
+
+build() {
+ build_args=$args' target_cpu="'$1'"'
+
+ code=$chromium_code
+ if [ $1 '==' "arm" ]; then
+ code+=00
+ elif [ $1 '==' "arm64" ]; then
+ code+=50
+ elif [ $1 '==' "x86" ]; then
+ code+=10
+ elif [ $1 '==' "x64" ]; then
+ code+=60
+ fi
+ build_args+=' android_default_version_code="'$code'"'
+
+ gn gen "out/$1" --args="$build_args"
+ ninja -C out/$1 system_webview_apk
+ if [ "$?" -eq 0 ]; then
+ [ "$1" '==' "x64" ] && android_arch="x86_64" || android_arch=$1
+ cp out/$1/apks/SystemWebView.apk ../android_external_chromium-webview/prebuilt/$android_arch/webview.apk
+ fi
+}
+
+while getopts ":a:chr:s" opt; do
+ case $opt in
+ a) for arch in ${supported_archs[@]}; do
+ [ "$OPTARG" '==' "$arch" ] && build_arch="$OPTARG" || ((arch_try++))
+ done
+ if [ $arch_try -eq ${#supported_archs[@]} ]; then
+ echo "Unsupported ARCH: $OPTARG"
+ echo "Supported ARCHs: ${supported_archs[@]}"
+ exit 1
+ fi
+ ;;
+ c) clean=1 ;;
+ h) usage ;;
+ r) version=(${OPTARG//:/ })
+       chromium_version=${version[0]}
+       chromium_code=${version[1]}
+ ;;
+ s) gsync=1 ;;
+ :)
+ echo "Option -$OPTARG requires an argument"
+ echo
+ usage
+ ;;
+ \?)
+ echo "Invalid option:-$OPTARG"
+ echo
+ usage
+ ;;
+ esac
+done
+shift $((OPTIND-1))
+
+# Download android_external_chromium-webview
+if [ ! -d android_external_chromium-webview ]; then
+ git clone https://github.com/LineageOS/android_external_chromium-webview.git --depth 1
+fi
+
+# Add depot_tools to PATH
+if [ ! -d depot_tools ]; then
+ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+fi
+export PATH="$(pwd -P)/depot_tools:$PATH"
+
+if [ ! -d src ]; then
+ fetch android
+ yes | gclient sync -D -R -r $chromium_version
+fi
+
+if [ $gsync -eq 1 ]; then
+ find src -name index.lock -delete
+ yes | gclient sync -R -r $chromium_version
+fi
+cd src
+
+# Replace webview icon
+mkdir -p android_webview/nonembedded/java/res_icon/drawable-xxxhdpi
+cp chrome/android/java/res_chromium/mipmap-mdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-mdpi/icon_webview.png
+cp chrome/android/java/res_chromium/mipmap-hdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-hdpi/icon_webview.png
+cp chrome/android/java/res_chromium/mipmap-xhdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-xhdpi/icon_webview.png
+cp chrome/android/java/res_chromium/mipmap-xxhdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-xxhdpi/icon_webview.png
+cp chrome/android/java/res_chromium/mipmap-xxxhdpi/app_icon.png android_webview/nonembedded/java/res_icon/drawable-xxxhdpi/icon_webview.png
+
+# Apply our patches
+if [ $gsync -eq 1 ]; then
+ git am ../android_external_chromium-webview/patches/*
+fi
+
+# Build args
+args='target_os="android"'
+args+=' is_debug=false'
+args+=' is_official_build=true'
+args+=' is_chrome_branded=false'
+args+=' use_official_google_api_keys=false'
+args+=' ffmpeg_branding="Chrome"'
+args+=' proprietary_codecs=true'
+args+=' enable_resource_whitelist_generation=false'
+args+=' enable_remoting=true'
+args+=' is_component_build=false'
+args+=' symbol_level=0'
+args+=' enable_nacl=false'
+args+=' blink_symbol_level = 0'
+args+=' android_default_version_name="'$chromium_version'"'
+
+# Setup environment
+[ $clean -eq 1 ] && rm -rf out
+. build/android/envsetup.sh
+
+# Check target and build
+if [ -n "$build_arch" ]; then
+ build $build_arch
+else
+ build arm
+ build arm64
+ build x86
+ build x64
+fi
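Typical invocations, matching the options parsed above (the release string follows the `version:code` form from the usage text, and the per-arch builds append the 00/50/10/60 suffixes to the version code as in `build()`):

```
# First run: clone the sources, sync, apply the patches, and build all four ABIs
./build-webview.sh -s

# Clean rebuild of only the arm64 WebView against a specific Chromium release
./build-webview.sh -c -s -r 80.0.3987.132:3987132 -a arm64
```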
diff --git a/config-fs-gen/README.md b/config-fs-gen/README.md
new file mode 100644
index 0000000..71f8859
--- /dev/null
+++ b/config-fs-gen/README.md
@@ -0,0 +1,164 @@
+# config-fs-gen
+
+```
+usage: config-fs-gen.py [-h]
+ capability_header_path
+ android_filesystem_config_header_path
+ vendor_group_path fs_config_paths
+ [fs_config_paths ...]
+
+Convert /vendor/etc/group ×
+/(system|vendor)/etc/(fs_config_dirs|fs_config_files) to config.fs
+
+positional arguments:
+ capability_header_path
+ path to
+ {android}/bionic/libc/kernel/uapi/linux/capability.h
+ android_filesystem_config_header_path
+ path to {android}/system/core/libcutils/include/privat
+ e/android_filesystem_config.h
+ vendor_group_path path to {rom}/vendor/etc/group
+ fs_config_paths paths to
+ {rom}/(system|vendor)/etc/fs_config_(dirs|files)
+
+optional arguments:
+ -h, --help show this help message and exit
+```
+```
+ Example usage:
+ $ ./config-fs-gen.py ~/lineage-16.0/bionic/libc/kernel/uapi/linux/capability.h \
+ ~/lineage-16.0/system/core/libcutils/include/private/android_filesystem_config.h \
+ ~/lineage-16.0/out/target/product/guacamole/vendor/etc/group \
+ ~/lineage-16.0/out/target/product/guacamole/{system,vendor}/etc/{fs_config_dirs,fs_config_files}
+ [AID_VENDOR_QTI_DIAG]
+ value:2901
+
+ [AID_VENDOR_QDSS]
+ value:2902
+
+ [AID_VENDOR_RFS]
+ value:2903
+
+ [AID_VENDOR_RFS_SHARED]
+ value:2904
+
+ [AID_VENDOR_ADPL_ODL]
+ value:2905
+
+ [AID_VENDOR_QRTR]
+ value:2906
+
+ [bt_firmware/]
+ mode: 0771
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: 0
+
+ [dsp/]
+ mode: 0771
+ user: AID_MEDIA
+ group: AID_MEDIA
+ caps: 0
+
+ [firmware/]
+ mode: 0771
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: 0
+
+ [firmware/image/*]
+ mode: 0771
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: 0
+
+ [persist/]
+ mode: 0771
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: 0
+
+ [vendor/bin/cnd]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: NET_BIND_SERVICE NET_ADMIN BLOCK_SUSPEND
+
+ [vendor/bin/hw/android.hardware.bluetooth@1.0-service-qti]
+ mode: 0755
+ user: AID_BLUETOOTH
+ group: AID_BLUETOOTH
+ caps: NET_ADMIN BLOCK_SUSPEND
+
+ [vendor/bin/ims_rtp_daemon]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_RADIO
+ caps: NET_BIND_SERVICE
+
+ [vendor/bin/imsdatadaemon]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: NET_BIND_SERVICE
+
+ [vendor/bin/imsrcsd]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_RADIO
+ caps: NET_BIND_SERVICE WAKE_ALARM BLOCK_SUSPEND
+
+ [vendor/bin/loc_launcher]
+ mode: 0755
+ user: AID_GPS
+ group: AID_GPS
+ caps: SETGID SETUID
+
+ [vendor/bin/pd-mapper]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: NET_BIND_SERVICE
+
+ [vendor/bin/pm-service]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: NET_BIND_SERVICE
+
+ [vendor/bin/sensors.qti]
+ mode: 0755
+ user: AID_SYSTEM
+ group: AID_SYSTEM
+ caps: NET_BIND_SERVICE
+
+ [vendor/bin/slim_daemon]
+ mode: 0755
+ user: AID_GPS
+ group: AID_GPS
+ caps: NET_BIND_SERVICE
+
+ [vendor/bin/wcnss_filter]
+ mode: 0755
+ user: AID_BLUETOOTH
+ group: AID_BLUETOOTH
+ caps: BLOCK_SUSPEND
+
+ [vendor/bin/xtwifi-client]
+ mode: 0755
+ user: AID_GPS
+ group: AID_GPS
+ caps: NET_BIND_SERVICE WAKE_ALARM BLOCK_SUSPEND
+
+ [vendor/firmware_mnt/image/*]
+ mode: 0771
+ user: AID_ROOT
+ group: AID_SYSTEM
+ caps: 0
+
+ [vendor/lib/modules-aging/*]
+ mode: 0644
+ user: AID_ROOT
+ group: AID_ROOT
+ caps: 0
+```
diff --git a/config-fs-gen/config-fs-gen.py b/config-fs-gen/config-fs-gen.py
new file mode 100755
index 0000000..2e87d68
--- /dev/null
+++ b/config-fs-gen/config-fs-gen.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import print_function
+
+import argparse
+import parse
+import struct
+
+
+def parse_cmdline():
+ parser = argparse.ArgumentParser(
+ description='Convert /vendor/etc/group × /(system|vendor)/etc/(fs_config_dirs|fs_config_files) to config.fs')
+ parser.add_argument('capability_header_path',
+ help='path to {android}/bionic/libc/kernel/uapi/linux/capability.h')
+ parser.add_argument('android_filesystem_config_header_path',
+ help='path to {android}/system/core/libcutils/include/private/android_filesystem_config.h')
+ parser.add_argument('vendor_group_path',
+ help='path to {rom}/vendor/etc/group')
+ parser.add_argument('fs_config_paths', nargs='+',
+ help='paths to {rom}/(system|vendor)/etc/fs_config_(dirs|files)')
+ return parser.parse_args()
+
+
+def get_capabilities(capability_header_path):
+ capabilities = {}
+
+ with open(capability_header_path, 'r') as file:
+ for line in file:
+ s = parse.search('#define CAP_{:w} {:d}', line)
+
+ if s is not None:
+ capabilities[s[1]] = s[0]
+
+ return capabilities
+
+
+def get_groups(android_filesystem_config_header_path, vendor_group_path):
+ system_groups = {}
+ vendor_groups = {}
+
+ with open(android_filesystem_config_header_path, 'r') as file:
+ for line in file:
+ s = parse.search('#define AID_{:w} {:d}', line)
+
+ if s is not None:
+ system_groups[s[1]] = 'AID_' + s[0]
+
+ with open(vendor_group_path, 'r') as file:
+ for line in file:
+ name, _, uid, _ = line.split(':', 3)
+ vendor_groups[uid] = 'AID_' + name.upper()
+
+ return system_groups, vendor_groups
+
+
+def get_fs_path_configs(fs_config_paths, system_groups, vendor_groups):
+ fs_path_config = {}
+
+    for fs_config_path in fs_config_paths:
+ with open(fs_config_path, 'rb') as file:
+ while True:
+ bytes = file.read(struct.calcsize('<HHHHQ'))
+
+                if not bytes:
+ break
+
+ length, mode, uid, gid, caps = struct.unpack('<HHHHQ', bytes)
+ name = file.read(length - len(bytes)).decode().rstrip('\x00')
+
+ fs_path_config[name] = {
+ 'mode': mode,
+ 'user': gid_to_str(uid, system_groups, vendor_groups),
+ 'group': gid_to_str(gid, system_groups, vendor_groups),
+ 'caps': caps_to_str(caps)
+ }
+
+ return fs_path_config
+
+
+def caps_to_str(caps):
+ caps_list = []
+
+ # return '0' directly if there are no special capabilities set
+ if caps == 0:
+ return str(caps)
+
+ # try to match well known linux capabilities
+ for cap in capabilities:
+ cap_mask_long = 1 << cap
+
+ if caps & cap_mask_long:
+ caps = caps & ~cap_mask_long
+ caps_list.append(capabilities[cap])
+
+ # append unmatched caps if needed
+ if caps > 0:
+ caps_list.append(str(caps))
+
+ return ' '.join(caps_list)
+
+
+def gid_to_str(gid, system_groups, vendor_groups):
+ if gid in system_groups:
+ return system_groups[gid]
+
+ if gid in vendor_groups:
+ return vendor_groups[gid]
+
+ return gid
+
+
+if __name__ == '__main__':
+ args = parse_cmdline()
+ capabilities = get_capabilities(args.capability_header_path)
+ system_groups, vendor_groups = get_groups(
+ args.android_filesystem_config_header_path,
+ args.vendor_group_path)
+ fs_path_configs = get_fs_path_configs(
+ args.fs_config_paths,
+ system_groups,
+ vendor_groups)
+
+ # print vendor AIDs
+ for gid in sorted(vendor_groups):
+ print('[{}]'.format(vendor_groups[gid]))
+ print('value:{}'.format(gid))
+ print()
+
+ # print {system,vendor} fs path configs
+ for name in sorted(fs_path_configs):
+ print('[{}]'.format(name))
+ print('mode: {:04o}'.format(fs_path_configs[name]['mode']))
+ print('user: {}'.format(fs_path_configs[name]['user']))
+ print('group: {}'.format(fs_path_configs[name]['group']))
+ print('caps: {}'.format(fs_path_configs[name]['caps']))
+ print()
diff --git a/device-deps-regenerator/.gitignore b/device-deps-regenerator/.gitignore
new file mode 100644
index 0000000..409a616
--- /dev/null
+++ b/device-deps-regenerator/.gitignore
@@ -0,0 +1,2 @@
+token
+*.json
diff --git a/device-deps-regenerator/README.md b/device-deps-regenerator/README.md
new file mode 100644
index 0000000..03ac5d3
--- /dev/null
+++ b/device-deps-regenerator/README.md
@@ -0,0 +1,6 @@
+1. Use Python 3.2 or higher
+2. Run `pip3 install -r requirements.txt`
+3. Grab a new token from [here](https://github.com/settings/tokens) - no scopes needed, just a name. Put it in a file named `token`
+4. Run `python3 app.py` to generate the full lineage.dependencies mapping
+5. Run `python3 device2kernel.py` to generate the kernel -> devices mapping (like cve_tracker/kernels.json)
+6. Run `python3 devices.py` to generate the device -> dependency mapping (like lineageos_updater/device_deps.json)
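End to end, the three scripts chain together through the JSON files they read and write (`app.py` writes `out.json`, which the other two consume), so a full regeneration looks roughly like this:

```
pip3 install -r requirements.txt
echo "<your GitHub token>" > token
python3 app.py              # queries GitHub, writes out.json
python3 device2kernel.py    # reads out.json, writes kernels.json
python3 devices.py          # reads out.json, writes device_deps.json
```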
diff --git a/device-deps-regenerator/app.py b/device-deps-regenerator/app.py
new file mode 100644
index 0000000..3a7ae00
--- /dev/null
+++ b/device-deps-regenerator/app.py
@@ -0,0 +1,130 @@
+import argparse
+import concurrent.futures
+import github
+import json
+import traceback
+
+from github import Github
+from base64 import b64decode
+
+parser = argparse.ArgumentParser()
+parser.add_argument('-j', '--jobs', type=int, help='Max number of workers to use. Default is none')
+args = parser.parse_args()
+
+with open('token') as f:
+ g = Github(f.readline().strip(), per_page=200)
+
+
+print(g.rate_limiting_resettime)
+
+org = g.get_organization('LineageOS')
+
+# supported branches, newest to oldest
+CUR_BRANCHES = ['lineage-16.0', 'lineage-15.1']
+
+def get_cm_dependencies(repo):
+ branch = None
+ for b in CUR_BRANCHES:
+ try:
+ branch = repo.get_branch(b)
+ break
+ except github.GithubException:
+ continue
+
+ if branch is None:
+ return None
+
+ sha = branch.commit.sha
+ try:
+ tree = repo.get_git_tree(sha)
+ except github.GithubException:
+ return None
+ blob_sha = None
+ for el in tree.tree:
+ if el.path == 'cm.dependencies' or el.path == 'lineage.dependencies':
+ blob_sha = el.sha
+ break
+
+ if blob_sha is None:
+ return [[], set()]
+
+ blob = repo.get_git_blob(blob_sha)
+
+ deps = b64decode(blob.content)
+
+ cmdeps = json.loads(deps.decode('utf-8'))
+
+ mydeps = []
+ non_device_repos = set()
+ for el in cmdeps:
+ if '_device_' not in el['repository']:
+ non_device_repos.add(el['repository'])
+ depbranch = el.get('branch', branch.name)
+ mydeps.append({'repo': el['repository'], 'branch': depbranch})
+
+ return [mydeps, non_device_repos]
+
+futures = {}
+n = 1
+
+dependencies = {}
+other_repos = set()
+
+with concurrent.futures.ThreadPoolExecutor(max_workers=args.jobs) as executor:
+ for repo in g.get_organization('LineageOS').get_repos():
+ if '_device_' not in repo.name:
+ continue
+ print(n, repo.name)
+ n += 1
+ futures[executor.submit(get_cm_dependencies, repo)] = repo.name
+ for future in concurrent.futures.as_completed(futures):
+ name = futures[future]
+ try:
+ data = future.result()
+ if data is None:
+ continue
+ dependencies[name] = data[0]
+ other_repos.update(data[1])
+ print(name, "=>", data[0])
+ except Exception as e:
+ print('%r generated an exception: %s'%(name, e))
+ traceback.print_exc()
+ continue
+ futures = {}
+
+ print(other_repos)
+ for name in other_repos:
+ print(name)
+ try:
+ repo = org.get_repo(name)
+ futures[executor.submit(get_cm_dependencies, repo)] = name
+ except Exception:
+ continue
+
+ other_repos = {}
+ for future in concurrent.futures.as_completed(futures):
+ name = futures[future]
+ try:
+ data = future.result()
+ if data is None:
+ continue
+ dependencies[name] = data[0]
+ for el in data[1]:
+ if el in dependencies:
+ continue
+ other_repos.update(data[1])
+ print(name, "=>", data[0])
+ except Exception as e:
+ print('%r generated an exception: %s'%(name, e))
+ traceback.print_exc()
+ continue
+ futures = {}
+
+
+print(other_repos)
+#for name in other_repos:
+# repo = org.get_repo(name)
+# dependencies[name] = get_cm_dependencies(repo)
+
+with open('out.json', 'w') as f:
+ json.dump(dependencies, f, indent=4)
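Once `out.json` exists it is just a repo-name to dependency-list mapping, so spot-checking a single device repo is straightforward (the repo name below is only illustrative and must be one that was actually indexed):

```
python3 -c "import json; print(json.load(open('out.json'))['android_device_google_marlin'])"
```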
diff --git a/device-deps-regenerator/device2kernel.py b/device-deps-regenerator/device2kernel.py
new file mode 100644
index 0000000..0a96361
--- /dev/null
+++ b/device-deps-regenerator/device2kernel.py
@@ -0,0 +1,47 @@
+import json
+
+# Define device repos that have repos that depend on them,
+# otherwise the script will remove these on the assumption
+# they are common repos
+COMMON_DEVICE = [
+ 'android_device_asus_flo',
+ 'android_device_asus_grouper',
+ 'android_device_google_marlin',
+ 'android_device_htc_flounder',
+ 'android_device_samsung_espressowifi',
+ 'android_device_samsung_n1awifi',
+ 'android_device_samsung_t0lte',
+]
+
+with open('out.json') as f:
+ mapping = json.load(f)
+
+kernels = {}
+
+reverse_deps = {}
+
+for device in mapping:
+ deps = mapping[device]
+ if device not in reverse_deps:
+ reverse_deps[device] = []
+ for repo in deps:
+ if repo['repo'] not in reverse_deps:
+ reverse_deps[repo['repo']] = []
+ reverse_deps[repo['repo']].append(device)
+
+def simplify_reverse_deps(repo):
+ if len(reverse_deps[repo]) == 0 and '-common' not in repo:
+ return {repo,}
+ res = set()
+ for i in reverse_deps[repo]:
+ res.update(simplify_reverse_deps(i))
+ if repo in COMMON_DEVICE:
+ res.add(repo)
+ return res
+
+for repo in reverse_deps:
+ if 'kernel' in repo:
+ kernels[repo] = sorted(list(simplify_reverse_deps(repo)))
+
+with open('kernels.json', 'w') as f:
+ json.dump(kernels, f, indent=4, sort_keys=True)
diff --git a/device-deps-regenerator/devices.py b/device-deps-regenerator/devices.py
new file mode 100644
index 0000000..cbfa0f3
--- /dev/null
+++ b/device-deps-regenerator/devices.py
@@ -0,0 +1,36 @@
+import json
+
+with open('out.json') as f:
+ mapping = json.load(f)
+
+devices = {}
+suffixes = {}
+blacklist = ['sepolicy', 'devicesettings', 'common', 'atv']
+
+def simplify_reverse_deps(repo, device):
+ # repo['branch'] = cm-14.1 or cm-14.1-caf or cm-14.1-sony
+ if 'branch' in repo and repo['branch'].count('-') > 1: # get suffix
+ if repo['repo'] not in suffixes:
+ suffixes[repo['repo']] = {}
+ suffixes[repo['repo']][device] = '-' + repo['branch'].split('-', 2)[2]
+
+ if repo['repo'] not in mapping or len(mapping[repo['repo']]) == 0:
+ return [repo['repo']]
+ res = []
+ for i in mapping[repo['repo']]:
+ res += (simplify_reverse_deps(i, device))
+ res.append(repo['repo'])
+ return res
+
+for repo in mapping:
+ if 'device' not in repo or any(x in repo for x in blacklist):
+ continue
+ codename = repo.split('_', maxsplit=3)[-1]
+ if codename in devices:
+ print("warning: dupe: %s"%codename)
+ devices[codename] = sorted(list(set(simplify_reverse_deps({'repo': repo}, codename))))
+
+with open('device_deps.json', 'w') as f:
+ out = {'devices': devices, 'suffixes': suffixes}
+ out = devices
+ json.dump(out, f, indent=4, sort_keys=True)
diff --git a/device-deps-regenerator/requirements.txt b/device-deps-regenerator/requirements.txt
new file mode 100644
index 0000000..b81016f
--- /dev/null
+++ b/device-deps-regenerator/requirements.txt
@@ -0,0 +1 @@
+pygithub
diff --git a/emoji-updater/emoji-updater.py b/emoji-updater/emoji-updater.py
new file mode 100755
index 0000000..a0293c7
--- /dev/null
+++ b/emoji-updater/emoji-updater.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+import sys
+
+import requests
+from lxml import etree
+
+if __name__ == '__main__':
+ if len(sys.argv) < 2:
+ sys.exit(f'usage: {sys.argv[0]} [url|https://unicode.org/emoji/charts-12.0/full-emoji-list.html]')
+
+ url = sys.argv[1]
+ req = requests.get(url=url)
+
+ parser = etree.HTMLParser(recover=True, encoding='utf-8')
+ doc = etree.fromstring(text=req.content, parser=parser)
+
+ for tr in doc.xpath('.//tr'):
+ mediumhead = tr.xpath('.//th[@class="mediumhead"]/a')
+
+ if len(mediumhead) > 0:
+ print(f' <!-- {mediumhead[0].text} -->')
+ continue
+
+ code = tr.xpath('.//td[@class="code"]/a')
+
+ if len(code) > 0:
+ codes = ','.join([x[2:] for x in code[0].text.split()])
+ print(f' <item>{codes}</item>')
+ continue
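The script prints ready-to-paste `<item>` lines (one per emoji, codepoints comma-separated with the `U+` prefix stripped) plus `<!-- section -->` comments, so the usual way to run it is to redirect stdout into a scratch file; the output file name here is arbitrary:

```
./emoji-updater.py https://unicode.org/emoji/charts-12.0/full-emoji-list.html > emoji-items.xml
```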
diff --git a/key-migration/export-keys.sh b/key-migration/export-keys.sh
new file mode 100755
index 0000000..d0c4ba8
--- /dev/null
+++ b/key-migration/export-keys.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+if ! cd "$1"; then
+ echo "USAGE: $0 PATH"
+ exit 1
+fi
+
+for x in platform media shared; do
+ echo ${x}_key_release=\"$(openssl x509 -pubkey -noout -in $x.x509.pem | grep -v '-' | tr -d '\n')\"
+ echo ${x}_cert_release=\"$(openssl x509 -outform der -in $x.x509.pem | xxd -p | tr -d '\n')\"
+done
+
+echo release_key=\"$(openssl x509 -pubkey -noout -in releasekey.x509.pem | grep -v '-' | tr -d '\n')\"
+echo release_cert=\"$(openssl x509 -outform der -in releasekey.x509.pem | xxd -p | tr -d '\n')\"
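The script expects a directory containing `platform`, `media`, `shared`, and `releasekey` `.x509.pem` certificates, and prints shell variable assignments whose names match those used in `migration.sh` (`platform_key_release`, `release_cert`, and so on). The certificate directory below is only an example path:

```
./export-keys.sh ~/.android-certs > release-keys.txt
```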
diff --git a/key-migration/migration.sh b/key-migration/migration.sh
new file mode 100644
index 0000000..7bf8cbe
--- /dev/null
+++ b/key-migration/migration.sh
@@ -0,0 +1,82 @@
+#!/system/bin/sh
+
+# Release keys
+media_cert_release='308203b33082029ba003020102020900e1c286d5b6096213300d06092a864886f70d01010505003070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f53301e170d3137303130373034323132385a170d3434303532353034323132385a3070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f5330820122300d06092a864886f70d01010105000382010f003082010a0282010100cc818ef9a14c1ae3b79fb0805f59aff780e9a8b28bddf9c71703af8ffc15dc30a8439a6ebbfea3b2ca429cd07b1feb6a3b33446688964cf70f23e9637d91ef0a5654c2d4261487fdb9bb0b01abfd52f5edc2aec205a7849b0f66ac25d9e434a43e8a5cc75f11df6847c5f7d9fdad45ef0f6bf53f8c7bb73319e6a28bfee7e06e97d5d2b7d53c8cf2d2638ede5404dfd27cc39c7631c9a52eb632f79cb108b50d0e136bd639d19b4f05e5c840680a0241b86fd702c2c44b2f196e306eac2dc40066f1ea1e43496b1ba4c85fedfd68374064f139a5630dde0c84a0e9dbb7f953562fe400ff02bcc6aadfda63fd572e473da83e22bf21c1421fefb3c3707d5a97630203010001a350304e301d0603551d0e04160414fbc481032a0bed6d36b8d11d46e0ef18ebfc06b2301f0603551d23041830168014fbc481032a0bed6d36b8d11d46e0ef18ebfc06b2300c0603551d13040530030101ff300d06092a864886f70d010105050003820101007dd63789ed7427ca371673242b6c5f7fb669bcb6f681ad833e413bcba56be119df42ae035681d806a7c6fb8db17a09120b7dcde5b28752466e6afaf57339f6d9d6ef74f687535021e7c33c0c8c99bd10bcd587c2856b8ba1318ea9616aee74b6d60efd15a59014c190b9c8bb79ad942db520ff0c2fd7ebef7f68424ab9a375f7768a24a8c0f954de6c31c3880d87ec8e518daf51466a78c3182e8fcfe5774e217309b27e6110f84f0bbe89d15d21d77c4c269e14e9d88619145fdf43aa17b54388006cb8bf648001951bc5a1f2b308f6f5b1c929fa522406a84b253c3032187fa5480aaf3de4eba86b2bc43371b5eb2bf26f1ce17ab4413a5793087e41c10f05'
+media_key_release='MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzIGO+aFMGuO3n7CAX1mv94DpqLKL3fnHFwOvj/wV3DCoQ5puu/6jsspCnNB7H+tqOzNEZoiWTPcPI+ljfZHvClZUwtQmFIf9ubsLAav9UvXtwq7CBaeEmw9mrCXZ5DSkPopcx18R32hHxffZ/a1F7w9r9T+Me7czGeaii/7n4G6X1dK31TyM8tJjjt5UBN/SfMOcdjHJpS62MvecsQi1DQ4Ta9Y50ZtPBeXIQGgKAkG4b9cCwsRLLxluMG6sLcQAZvHqHkNJaxukyF/t/Wg3QGTxOaVjDd4MhKDp27f5U1Yv5AD/ArzGqt/aY/1XLkc9qD4ivyHBQh/vs8NwfVqXYwIDAQAB'
+platform_cert_release='308203b33082029ba003020102020900d94a816d624a0050300d06092a864886f70d01010505003070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f53301e170d3137303130373034323132365a170d3434303532353034323132365a3070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f5330820122300d06092a864886f70d01010105000382010f003082010a02820101009a41d7715c8ce053804b1260bbad6c332def57bfd3d26b26102d8892ce9a3d0b780b3bf4cfc5778303470110cde8f6be1766147128731ec202977b62f1b4ae9b0e331a334a27c84c31b5885c4bd4650423cae821c7e08522515f63a9d3667f8c8b33cc9c322704e4645e37b13a50a8d04d3b04dcd0e1a87efbc32e7172f296df8ea33fcb071f9868d91a462e894fe49670ae27354dbfd14a83a23e6c775ae40761c91e416b042ceff81cf66d9e4d73c4bce7e14d6347dcbeab38820316be81decda9a7d5de8da71f1fe0e64a570da004279a6ea2628de4a839a7fdd9baa2d1c5445c6a6e7ed1949a18cd5a23cf602d7dd3d8cf17c953aa525454f17eaef84ea50203010001a350304e301d0603551d0e04160414609957d0ee9881d4d84690e80386c723b738d223301f0603551d23041830168014609957d0ee9881d4d84690e80386c723b738d223300c0603551d13040530030101ff300d06092a864886f70d0101050500038201010063e903bdcb6ee9c7767a170080caccea08a96d7f732fecdd261acdab1f651c701593051408ef0aec2de88fd7e8e7502815cc04a8b47ce35fba0179d0a3f0e7c7e0fc94cce049c34f3601e525b1b92ab19a2618749da33921b090321cff9f146eab1abd9166021a022f003e1034d0c7ba2e3937cfcd75700575fc9d542643924b4d869716eec2c226b1947bc11fe3cd45e3ba27bd52724e30fb69174d6d87199baa425642ab5b2da659ed9da32809311cbc217d1a72f60e8257a191083f210a909507fa08ce8a467b0434fee41a9c068ad0ada084ca87290f0c70b7f16e0d22a31a5dda2a0de059e63257126191a55523d714096582028e99b089b213e71467b2'
+platform_key_release='MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmkHXcVyM4FOASxJgu61sMy3vV7/T0msmEC2Iks6aPQt4Czv0z8V3gwNHARDN6Pa+F2YUcShzHsICl3ti8bSumw4zGjNKJ8hMMbWIXEvUZQQjyughx+CFIlFfY6nTZn+MizPMnDInBORkXjexOlCo0E07BNzQ4ah++8MucXLylt+Ooz/LBx+YaNkaRi6JT+SWcK4nNU2/0UqDoj5sd1rkB2HJHkFrBCzv+Bz2bZ5Nc8S85+FNY0fcvqs4ggMWvoHezamn1d6Npx8f4OZKVw2gBCeabqJijeSoOaf92bqi0cVEXGpuftGUmhjNWiPPYC1909jPF8lTqlJUVPF+rvhOpQIDAQAB'
+shared_cert_release='308203b33082029ba003020102020900dacb4739a1727e6e300d06092a864886f70d01010505003070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f53301e170d3137303130373034323132375a170d3434303532353034323132375a3070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f5330820122300d06092a864886f70d01010105000382010f003082010a028201010096f2f8f3dad19ee79f28256693ce970bda272f72025b4c4a6c20e027a921622530e1b780f9117c98f10b910259005a27da5020d9345844acec3da13ee0d684e067958a23dfc751ec497f5e3f3b13023c5428edc53aadbc44c10bfa84abd436bf13f8222ebf724f7e2e557e81effe21faa52b88899108cd62b7d5a84821576ed2edf50b83324a93439185b078711974395763265f7465b6bb908b0139489707cd4e76e4937545751f6b9c90d7b4bd17a234613d0919ca66e34d78906d5eaa43409e7c57d74266996f2e3f1d5a6061b65fb19f85c62829a8b3978541e64b3fa4f9febcbf5b291eaa433774c45749f73e12d377b9f9b0dfd0d7d8003a78c144a2f50203010001a350304e301d0603551d0e04160414b4dc1c6737cee40f786a398c8bb345d8a8eecbc2301f0603551d23041830168014b4dc1c6737cee40f786a398c8bb345d8a8eecbc2300c0603551d13040530030101ff300d06092a864886f70d010105050003820101002002b4557df909ace023d050f6c739b57ef311ce39abffda1b74161a71554ab78fd23b2b4b57d8c31e646f71f9063e1486a0502da00cfe4d87c4a75dd1eec6416b4460adace72a1cde79ea39f95e4fa3eb1777b9c2f080438298b963d21b54aa3aa7fc6f1b9717333c7b5244ac7862f07c34f443a72462141742f309f51e349fe21b21affcb5369c8723eef2ad2f1452b6bce685337a2751d3bd33eabece08bd62ed6801e6e331eaba7ae0a6dfb632a4fe184141013869bbdbd0d564a44091349890e178f755d14d08d6d868e4abdaa27759fcd576b1f2026db630d11639fd9672d54f0c15b03dc754c5c11ff168d3dd6b2dc7b58e7f47f472f684c41c87bd9e'
+shared_key_release='MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlvL489rRnuefKCVmk86XC9onL3ICW0xKbCDgJ6khYiUw4beA+RF8mPELkQJZAFon2lAg2TRYRKzsPaE+4NaE4GeViiPfx1HsSX9ePzsTAjxUKO3FOq28RMEL+oSr1Da/E/giLr9yT34uVX6B7/4h+qUriImRCM1it9WoSCFXbtLt9QuDMkqTQ5GFsHhxGXQ5V2MmX3RltruQiwE5SJcHzU525JN1RXUfa5yQ17S9F6I0YT0JGcpm4014kG1eqkNAnnxX10JmmW8uPx1aYGG2X7GfhcYoKaizl4VB5ks/pPn+vL9bKR6qQzd0xFdJ9z4S03e5+bDf0NfYADp4wUSi9QIDAQAB'
+release_cert='308203b33082029ba003020102020900e10413c773c3c54f300d06092a864886f70d01010505003070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f53301e170d3137303130373034323132355a170d3434303532353034323132355a3070310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c6531123010060355040a0c094c696e656167654f5331123010060355040b0c094c696e656167654f533112301006035504030c094c696e656167654f5330820122300d06092a864886f70d01010105000382010f003082010a0282010100a64dd3e1f842038ff03f67b8e9bf09530fc2913cb53e3654c78ec20dbc8b1e7113628ca5abc0860560cb442c1b51f98b6dce5e59c49037f27f64f48aef0490ab99106f0807a2130e1a8b3aacd834e656f0854b602677b66c007b14c2d0c28d0dc61341de648d879db59a9e79e12888731a3bb1c61fb58f725ac071de12b61093decc5685ac2474cee2cf22f0e44bf208410d91c5d1f60aa7945548822bad756a70e0fe17c83f35e5fa59be4dd2b446048f9b42332186ff0aa47ed39c210de9fd981c55719ec8f30caac4415c21605f13f57d0ecdf0682c484e9456748debe80e2f2e0b1cf47fb95d92fff4673e16f63781f3b2c7cd4b904041abeb5df85059990203010001a350304e301d0603551d0e0416041472963227d66c4c4d5fa0916ac22c793cd45f435c301f0603551d2304183016801472963227d66c4c4d5fa0916ac22c793cd45f435c300c0603551d13040530030101ff300d06092a864886f70d010105050003820101008a247e01c9c72e7167dec24993f23f539c07618c2819ee4d3eae96d283173aebe112469405c4c48052ef1ec72f2949ac0677b49eac5055bea2fe75a8dd3dfc8b3ccf88fb10c4a0c2779dc2861e80b445128c18485ce2b4b4f74ff0389247659b5386572374d0f9bf26ccc261e783397ca3808d5a19de26f8db84d92ac6872b0bdbc60ac768e4ac74d142e7f2c5d5c1cabbca5d1baadb923252b19ba0950370d556707c37c399c936a67e08fd1748cea5656d8add7fa2177ed630796d8de0fae9570b274a04590984d5c065fe307a5346eb54962e41f580d1a448463112e2d5f491a39bb9f9a2a0cac5855c28a35fbdb1ad4a6aeab2aea2f9056e73fc487737eb'
+release_key='MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApk3T4fhCA4/wP2e46b8JUw/CkTy1PjZUx47CDbyLHnETYoylq8CGBWDLRCwbUfmLbc5eWcSQN/J/ZPSK7wSQq5kQbwgHohMOGos6rNg05lbwhUtgJne2bAB7FMLQwo0NxhNB3mSNh521mp554SiIcxo7scYftY9yWsBx3hK2EJPezFaFrCR0zuLPIvDkS/IIQQ2RxdH2CqeUVUiCK611anDg/hfIPzXl+lm+TdK0RgSPm0IzIYb/CqR+05whDen9mBxVcZ7I8wyqxEFcIWBfE/V9Ds3waCxITpRWdI3r6A4vLgsc9H+5XZL/9Gc+FvY3gfOyx81LkEBBq+td+FBZmQIDAQAB'
+
+# Test keys
+media_cert_test='308204a830820390a003020102020900f2b98e6123572c4e300d06092a864886f70d0101040500308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d301e170d3038303431353233343035375a170d3335303930313233343035375a308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d30820120300d06092a864886f70d01010105000382010d00308201080282010100ae250c5a16ef97fc2869ac651b3217cc36ba0e86964168d58a049f40ce85867123a3ffb4f6d949c33cf2da3a05c23eacaa57d803889b1759bcf59e7c6f21890ae25085b7ed56aa626c0989ef9ccd36362ca0e8d1b9603fd4d8328767926ccc090c68b775ae7ff30934cc369ef2855a2667df0c667fd0c7cf5d8eba655806737303bb624726eabaedfb72f07ed7a76ab3cb9a381c4b7dcd809b140d891f00213be401f58d6a06a61eadc3a9c2f1c6567285b09ae09342a66fa421eaf93adf7573a028c331d70601ab3af7cc84033ece7c772a3a5b86b0dbe9d777c3a48aa9801edcee2781589f44d9e4113979600576a99410ba81091259dad98c6c68ff784b8f020103a381fc3081f9301d0603551d0e04160414ca293caa8bc0ed3e542eef4205a2bff2b57e4d753081c90603551d230481c13081be8014ca293caa8bc0ed3e542eef4205a2bff2b57e4d75a1819aa48197308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d820900f2b98e6123572c4e300c0603551d13040530030101ff300d06092a864886f70d0101040500038201010084de9516d5e4a87217a73da8487048f53373a5f733f390d61bdf3cc9e5251625bfcaa7c3159cae275d172a9ae1e876d5458127ac542f68290dd510c0029d8f51e0ee156b7b7b5acdb394241b8ec78b74e5c42c5cafae156caf5bd199a23a27524da072debbe378464a533630b0e4d0ffb7e08ecb701fadb6379c74467f6e00c6ed888595380792038756007872c8e3007af423a57a2cab3a282869b64c4b7bd5fc187d0a7e2415965d5aae4e07a6df751b4a75e9793c918a612b81cd0b628aee0168dc44e47b10d3593260849d6adf6d727dc24444c221d3f9ecc368cad07999f2b8105bc1f20d38d41066cc1411c257a96ea4349f5746565507e4e8020a1a81'
+media_key_test='MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEAriUMWhbvl/woaaxlGzIXzDa6DoaWQWjVigSfQM6FhnEjo/+09tlJwzzy2joFwj6sqlfYA4ibF1m89Z58byGJCuJQhbftVqpibAmJ75zNNjYsoOjRuWA/1Ngyh2eSbMwJDGi3da5/8wk0zDae8oVaJmffDGZ/0MfPXY66ZVgGc3MDu2JHJuq67fty8H7Xp2qzy5o4HEt9zYCbFA2JHwAhO+QB9Y1qBqYercOpwvHGVnKFsJrgk0Kmb6Qh6vk633VzoCjDMdcGAas698yEAz7OfHcqOluGsNvp13fDpIqpgB7c7ieBWJ9E2eQROXlgBXaplBC6gQkSWdrZjGxo/3hLjwIBAw=='
+platform_cert_test='308204a830820390a003020102020900b3998086d056cffa300d06092a864886f70d0101040500308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d301e170d3038303431353232343035305a170d3335303930313232343035305a308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d30820120300d06092a864886f70d01010105000382010d003082010802820101009c780592ac0d5d381cdeaa65ecc8a6006e36480c6d7207b12011be50863aabe2b55d009adf7146d6f2202280c7cd4d7bdb26243b8a806c26b34b137523a49268224904dc01493e7c0acf1a05c874f69b037b60309d9074d24280e16bad2a8734361951eaf72a482d09b204b1875e12ac98c1aa773d6800b9eafde56d58bed8e8da16f9a360099c37a834a6dfedb7b6b44a049e07a269fccf2c5496f2cf36d64df90a3b8d8f34a3baab4cf53371ab27719b3ba58754ad0c53fc14e1db45d51e234fbbe93c9ba4edf9ce54261350ec535607bf69a2ff4aa07db5f7ea200d09a6c1b49e21402f89ed1190893aab5a9180f152e82f85a45753cf5fc19071c5eec827020103a381fc3081f9301d0603551d0e041604144fe4a0b3dd9cba29f71d7287c4e7c38f2086c2993081c90603551d230481c13081be80144fe4a0b3dd9cba29f71d7287c4e7c38f2086c299a1819aa48197308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d820900b3998086d056cffa300c0603551d13040530030101ff300d06092a864886f70d01010405000382010100572551b8d93a1f73de0f6d469f86dad6701400293c88a0cd7cd778b73dafcc197fab76e6212e56c1c761cfc42fd733de52c50ae08814cefc0a3b5a1a4346054d829f1d82b42b2048bf88b5d14929ef85f60edd12d72d55657e22e3e85d04c831d613d19938bb8982247fa321256ba12d1d6a8f92ea1db1c373317ba0c037f0d1aff645aef224979fba6e7a14bc025c71b98138cef3ddfc059617cf24845cf7b40d6382f7275ed738495ab6e5931b9421765c491b72fb68e080dbdb58c2029d347c8b328ce43ef6a8b15533edfbe989bd6a48dd4b202eda94c6ab8dd5b8399203daae2ed446232e4fe9bd961394c6300e5138e3cfd285e6e4e483538cb8b1b357'
+platform_key_test='MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEAnHgFkqwNXTgc3qpl7MimAG42SAxtcgexIBG+UIY6q+K1XQCa33FG1vIgIoDHzU172yYkO4qAbCazSxN1I6SSaCJJBNwBST58Cs8aBch09psDe2AwnZB00kKA4WutKoc0NhlR6vcqSC0JsgSxh14SrJjBqnc9aAC56v3lbVi+2OjaFvmjYAmcN6g0pt/tt7a0SgSeB6Jp/M8sVJbyzzbWTfkKO42PNKO6q0z1M3GrJ3GbO6WHVK0MU/wU4dtF1R4jT7vpPJuk7fnOVCYTUOxTVge/aaL/SqB9tffqIA0JpsG0niFAL4ntEZCJOqtakYDxUugvhaRXU89fwZBxxe7IJwIBAw=='
+shared_cert_test='308204a830820390a003020102020900f2a73396bd38767a300d06092a864886f70d0101040500308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d301e170d3038303732333231353735395a170d3335313230393231353735395a308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d30820120300d06092a864886f70d01010105000382010d00308201080282010100c8c2dbfd094a2df45c3ff1a32ed21805ec72fc58d017971bd0f6b52c262d70819d191967e158dfd3a2c7f1b3e0e80ce545d79d2848220211eb86f0fd8312d37b420c113750cc94618ae872f4886463bdc4627caa0c0483c86493e3515571170338bfdcc4cd6addd1c0a2f35f5cf24ed3e4043a3e58e2b05e664ccde12bcb67735fd6df1249c369e62542bc0a4729e53917f5c38ffa52d17b73c9c73798ddb18ed481590875547e66bfc5daca4c25a6eb960ed96923709da302ba646cb496b325e86c5c8b2e7a3377b2bbe4c7cf33254291163f689152ac088550c83c508f4bf5adf0aed5a2dca0583f9ab0ad17650db7eea4b23fdb45885547d0feab72183889020103a381fc3081f9301d0603551d0e04160414cb4c7e2cdbb3f0ada98dab79968d172e9dbb1ed13081c90603551d230481c13081be8014cb4c7e2cdbb3f0ada98dab79968d172e9dbb1ed1a1819aa48197308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d820900f2a73396bd38767a300c0603551d13040530030101ff300d06092a864886f70d0101040500038201010040a8d096997959e917a36c44246b6bac2bae05437ecd89794118f7834720352d1c6f8a39b0869942f4da65981faa2951d33971129ec1921d795671c527d6e249f252829faf5b591310311e2de096500d568ad4114a656dc34a8c6f610453afc1ea7992dba4aa7b3f8543a6e35c0728de77fe97eeac83771fd0ec90f8e4449434ee0b6045783e70c7a2e460249260e003cf7608dc352a4c9ef706def4b26050e978ae2fffd7a3323787014915eb3cc874fcc7a9ae930877c5c8c7d1c2e2a8ee863c89180d1855cedba400e7ba43cccaa7243d397e7c0e8e8e4d7d4f92b6bbead49c0cf018069eddca2e7e2fb4668d89dbbd7950d0cd254180fa1eaafc2a556f84'
+shared_key_test='MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEAyMLb/QlKLfRcP/GjLtIYBexy/FjQF5cb0Pa1LCYtcIGdGRln4Vjf06LH8bPg6AzlRdedKEgiAhHrhvD9gxLTe0IMETdQzJRhiuhy9IhkY73EYnyqDASDyGST41FVcRcDOL/cxM1q3dHAovNfXPJO0+QEOj5Y4rBeZkzN4SvLZ3Nf1t8SScNp5iVCvApHKeU5F/XDj/pS0Xtzycc3mN2xjtSBWQh1VH5mv8XaykwlpuuWDtlpI3CdowK6ZGy0lrMl6Gxciy56M3eyu+THzzMlQpEWP2iRUqwIhVDIPFCPS/Wt8K7VotygWD+asK0XZQ237qSyP9tFiFVH0P6rchg4iQIBAw=='
+test_cert='308204a830820390a003020102020900936eacbe07f201df300d06092a864886f70d0101050500308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d301e170d3038303232393031333334365a170d3335303731373031333334365a308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d30820120300d06092a864886f70d01010105000382010d00308201080282010100d6931904dec60b24b1edc762e0d9d8253e3ecd6ceb1de2ff068ca8e8bca8cd6bd3786ea70aa76ce60ebb0f993559ffd93e77a943e7e83d4b64b8e4fea2d3e656f1e267a81bbfb230b578c20443be4c7218b846f5211586f038a14e89c2be387f8ebecf8fcac3da1ee330c9ea93d0a7c3dc4af350220d50080732e0809717ee6a053359e6a694ec2cb3f284a0a466c87a94d83b31093a67372e2f6412c06e6d42f15818dffe0381cc0cd444da6cddc3b82458194801b32564134fbfde98c9287748dbf5676a540d8154c8bbca07b9e247553311c46b9af76fdeeccc8e69e7c8a2d08e782620943f99727d3c04fe72991d99df9bae38a0b2177fa31d5b6afee91f020103a381fc3081f9301d0603551d0e04160414485900563d272c46ae118605a47419ac09ca8c113081c90603551d230481c13081be8014485900563d272c46ae118605a47419ac09ca8c11a1819aa48197308194310b3009060355040613025553311330110603550408130a43616c69666f726e6961311630140603550407130d4d6f756e7461696e20566965773110300e060355040a1307416e64726f69643110300e060355040b1307416e64726f69643110300e06035504031307416e64726f69643122302006092a864886f70d0109011613616e64726f696440616e64726f69642e636f6d820900936eacbe07f201df300c0603551d13040530030101ff300d06092a864886f70d010105050003820101007aaf968ceb50c441055118d0daabaf015b8a765a27a715a2c2b44f221415ffdace03095abfa42df70708726c2069e5c36eddae0400be29452c084bc27eb6a17eac9dbe182c204eb15311f455d824b656dbe4dc2240912d7586fe88951d01a8feb5ae5a4260535df83431052422468c36e22c2a5ef994d61dd7306ae4c9f6951ba3c12f1d1914ddc61f1a62da2df827f603fea5603b2c540dbd7c019c36bab29a4271c117df523cdbc5f3817a49e0efa60cbd7f74177e7a4f193d43f4220772666e4c4d83e1bd5a86087cf34f2dec21e245ca6c2bb016e683638050d2c430eea7c26a1c49d3760a58ab7f1a82cc938b4831384324bd0401fa12163a50570e684d'
+test_key='MIIBIDANBgkqhkiG9w0BAQEFAAOCAQ0AMIIBCAKCAQEA1pMZBN7GCySx7cdi4NnYJT4+zWzrHeL/Boyo6LyozWvTeG6nCqds5g67D5k1Wf/ZPnepQ+foPUtkuOT+otPmVvHiZ6gbv7IwtXjCBEO+THIYuEb1IRWG8DihTonCvjh/jr7Pj8rD2h7jMMnqk9Cnw9xK81AiDVAIBzLggJcX7moFM1nmppTsLLPyhKCkZsh6lNg7MQk6ZzcuL2QSwG5tQvFYGN/+A4HMDNRE2mzdw7gkWBlIAbMlZBNPv96YySh3SNv1Z2pUDYFUyLvKB7niR1UzEcRrmvdv3uzMjmnnyKLQjngmIJQ/mXJ9PAT+cpkdmd+brjigshd/ox1bav7pHwIBAw=='
+
+
+PACKAGES=/data/system/packages.xml
+PACKAGES_BACKUP=/data/system/packages-backup.xml
+
+case "$1" in
+ "official")
+ to_official=true
+ echo "Unofficial to official migration"
+ ;;
+ "unofficial")
+ echo "Official to unofficial migration"
+ ;;
+ *)
+ echo "USAGE: $0 official|unofficial"
+ exit 1
+ ;;
+esac
+
+# If there's a "backup" copy, then the current packages.xml might
+# be corrupted, so overwrite it with the backup copy. This is what
+# PackageManager would do.
+if [ -f $PACKAGES_BACKUP ]; then
+    echo "Overwriting $PACKAGES with $PACKAGES_BACKUP"
+    mv $PACKAGES_BACKUP $PACKAGES
+fi
+
+if [ ! -f $PACKAGES ]; then
+ echo "Error: $PACKAGES not found"
+ exit 1
+fi
+
+# Save a copy of the current file, not to be confused with
+# packages-backup.xml, which is used and generated by Android
+echo "Saving backup as $PACKAGES.bak"
+cp $PACKAGES $PACKAGES.bak
+
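+# Each sed below swaps one signing identity for its counterpart: the hex
+# strings are the certificates and the base64 strings the public keys,
+# exactly as they appear in packages.xml.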
+if [ "$to_official" = "true" ]; then
+ sed -i "s#$media_cert_test#$media_cert_release#g" $PACKAGES
+ sed -i "s#$platform_cert_test#$platform_cert_release#g" $PACKAGES
+ sed -i "s#$shared_cert_test#$shared_cert_release#g" $PACKAGES
+ sed -i "s#$test_cert#$release_cert#g" $PACKAGES
+ sed -i "s#$media_key_test#$media_key_release#g" $PACKAGES
+ sed -i "s#$platform_key_test#$platform_key_release#g" $PACKAGES
+ sed -i "s#$shared_key_test#$shared_key_release#g" $PACKAGES
+ sed -i "s#$test_key#$release_key#g" $PACKAGES
+else
+ sed -i "s#$media_cert_release#$media_cert_test#g" $PACKAGES
+ sed -i "s#$platform_cert_release#$platform_cert_test#g" $PACKAGES
+ sed -i "s#$shared_cert_release#$shared_cert_test#g" $PACKAGES
+ sed -i "s#$release_cert#$test_cert#g" $PACKAGES
+ sed -i "s#$media_key_release#$media_key_test#g" $PACKAGES
+ sed -i "s#$platform_key_release#$platform_key_test#g" $PACKAGES
+ sed -i "s#$shared_key_release#$shared_key_test#g" $PACKAGES
+ sed -i "s#$release_key#$test_key#g" $PACKAGES
+fi
+
+chmod 660 $PACKAGES
+chown system:system $PACKAGES
+
+echo "Done"
diff --git a/lineage-push/README.md b/lineage-push/README.md
new file mode 100644
index 0000000..38f7bdc
--- /dev/null
+++ b/lineage-push/README.md
@@ -0,0 +1,37 @@
+# LineageOS Push Script
+
+```
+usage: lineage-push.py [-h] [-a HASHTAG] [-b] [-d] [-e] [-f] [-l LABEL]
+                       [-m [MESSAGE]] [-p [PRIVATE]] [-r REF] [-s] [-t TOPIC]
+                       [-w [WIP]]
+                       branch
+
+Pushes a local git repository's changes to Gerrit for code review
+
+positional arguments:
+ branch upload change to branch
+
+optional arguments:
+ -h, --help show this help message and exit
+  -a HASHTAG, --hashtag HASHTAG
+                        add hashtag to change
+ -b, --bypass bypass review and merge
+ -d, --draft upload change as draft
+ -e, --edit upload change as edit
+ -f, --force force push
+ -l LABEL, --label LABEL
+ assign label
+ -m [MESSAGE], --message [MESSAGE]
+ add message to change
+ -p [PRIVATE], --private [PRIVATE]
+ upload change as private
+ -r REF, --ref REF push to specified ref
+ -s, --submit submit change
+ -t TOPIC, --topic TOPIC
+ append topic to change
+ -w [WIP], --wip [WIP]
+ upload change as WIP
+```
+
+Examples:
+
+```
+lineage-push -d -t test cm-14.1
+lineage-push -s -l "Code-Review+2,Verified+1" cm-14.1
+```
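+
+For reference, the first example above expands to roughly the following push
+(the exact username and repository name are read from your git config and
+remotes):
+
+```
+git push ssh://<user>@review.lineageos.org:29418/LineageOS/<repo> HEAD:refs/drafts/cm-14.1%topic=test
+```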
diff --git a/lineage-push/lineage-push.py b/lineage-push/lineage-push.py
new file mode 100755
index 0000000..c2f07a9
--- /dev/null
+++ b/lineage-push/lineage-push.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import re
+import subprocess
+import sys
+from argparse import ArgumentParser, ArgumentTypeError
+
+try:
+ from urllib.parse import quote_plus
+except ImportError:
+ from urllib import quote_plus
+
+
+def push(args):
+ command = 'git push'
+ parameters = []
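+    # Gerrit push syntax: HEAD:refs/<ref>/<branch>%<option1>,<option2>,...
+    # With --bypass the branch is pushed to directly, skipping review.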
+
+ if args.force:
+ command += ' -f'
+
+ username = subprocess.check_output(
+ ["git", "config", "review.review.lineageos.org.username"]).decode("utf-8").strip()
+ remotes = subprocess.check_output(
+ ["git", "remote", "-v"]).decode("utf-8").strip()
+ if "github.com/LineageOS" in remotes or "git@github.com:LineageOS" in remotes:
+ repo = re.search(r'LineageOS\S+', remotes).group(0)
+ elif "android.googlesource.com" in remotes:
+ repo = re.search(r'platform\S+', remotes).group(0)
+ repo = repo.replace("/", "_").replace("platform", "LineageOS/android")
+
+ command += ' ssh://{}@review.lineageos.org:29418/{}'.format(
+ username, repo)
+ command += ' HEAD:'
+
+ if args.ref != 'for':
+ command += 'refs/{}/'.format(args.ref)
+ elif args.bypass:
+ command += ''
+ elif args.draft:
+ command += 'refs/drafts/'
+ else:
+ command += 'refs/{}/'.format(args.ref)
+
+ command += args.branch
+
+ if args.label:
+ for label in args.label.split(','):
+ parameters.append('l={}'.format(label))
+
+ if args.edit:
+ parameters.append('edit')
+
+ if args.topic:
+ parameters.append('topic={}'.format(args.topic))
+
+ if args.hashtag:
+ parameters.append('hashtag={}'.format(args.hashtag))
+
+ if args.submit:
+ parameters.append('submit')
+
+    if args.private is True:
+        parameters.append('private')
+    elif args.private is False:
+        parameters.append('remove-private')
+
+    if args.wip is True:
+        parameters.append('wip')
+    elif args.wip is False:
+        parameters.append('ready')
+
+ if args.message:
+ parameters.append('m={}'.format(quote_plus(args.message)))
+
+ if len(parameters) > 0:
+ command += "%" + ','.join(parameters)
+
+ sys.exit(subprocess.call(command.split(' ')))
+
+
+def str2bool(v):
+ if v.lower() in ('yes', 'true', 't', 'y', '1'):
+ return True
+ elif v.lower() in ('no', 'false', 'f', 'n', '0'):
+ return False
+ else:
+ raise ArgumentTypeError('Boolean value expected.')
+
+
+def parse_cmdline():
+ parser = ArgumentParser(
+ description='Pushes a local git repository\'s changes to Gerrit for code review')
+ parser.add_argument('branch', help='upload change to branch')
+    parser.add_argument('-a', '--hashtag',
+                        help='add hashtag to change')
+ parser.add_argument('-b', '--bypass', action='store_true',
+ help='bypass review and merge')
+ parser.add_argument('-d', '--draft', action='store_true',
+ help='upload change as draft')
+ parser.add_argument('-e', '--edit', action='store_true',
+ help='upload change as edit')
+ parser.add_argument(
+ '-f', '--force', action='store_true', help='force push')
+ parser.add_argument('-l', '--label', help='assign label')
+ parser.add_argument('-m', '--message', nargs='?',
+ help='add message to change')
+ parser.add_argument('-p', '--private', type=str2bool, nargs='?',
+ const=True, help='upload change as private')
+ parser.add_argument(
+ '-r', '--ref', help='push to specified ref', default="for")
+ parser.add_argument(
+ '-s', '--submit', action='store_true', help='submit change')
+ parser.add_argument('-t', '--topic', help='append topic to change')
+ parser.add_argument('-w', '--wip', type=str2bool, nargs='?',
+ const=True, help='upload change as WIP')
+ return parser.parse_args()
+
+
+def main():
+ args = parse_cmdline()
+ push(args)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/maintainer-checks/README.md b/maintainer-checks/README.md
new file mode 100644
index 0000000..3ae6401
--- /dev/null
+++ b/maintainer-checks/README.md
@@ -0,0 +1,2 @@
+1. Use Python 3.3 or higher (the script relies on `FileNotFoundError`)
+2. `pip3 install -r requirements.txt`
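+3. Run `./maintainers.py` (the script expects the wiki, jenkins, updater and
+   cve repos checked out at the relative paths listed at its top); add `-m`
+   to dump maintainer info, or `-j <file>` to cross-check maintainers against
+   a list of Jira developer names, one per line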
diff --git a/maintainer-checks/maintainers.py b/maintainer-checks/maintainers.py
new file mode 100755
index 0000000..fa83a12
--- /dev/null
+++ b/maintainer-checks/maintainers.py
@@ -0,0 +1,182 @@
+#!/usr/bin/python3
+
+import yaml
+import re
+import os
+import json
+import argparse
+import urllib.request
+import urllib.error
+
+mydir = os.path.dirname(os.path.abspath(__file__))
+
+parser = argparse.ArgumentParser()
+parser.add_argument('-m', '--maintainers', help='list maintainers for devices', action='store_true', required=False)
+parser.add_argument('-j', '--jira', dest="jira_file", required=False, help='Path to list of jira developers', metavar='FILE')
+args = parser.parse_args()
+
+# Paths to certain repos
+repo = {
+ "updater": "../../jenkins/updater",
+ "wiki": "../../wiki",
+ "hudson": "../../jenkins",
+ "cve": "../../cve"
+}
+
+# List of all codenames in hudson
+codenames = []
+# List of devices in cve tracker
+cve_entries = []
+# List of devices with updater pages
+updater_pages = []
+# List of jira developers
+jira_devs = []
+# Discontinued devices
+discontinued_devices = []
+
+# Read hudson's build-targets file and collect the codenames it lists
+hudson_file = os.path.join(mydir, repo["hudson"] + "/lineage-build-targets")
+with open(hudson_file) as f:
+ for line in f:
+ # Ignore blank lines or lines with comments
+ if re.match(r"^\s*$", line) or re.match(r"#", line):
+ continue
+ # Add codenames to list
+ codenames.append(re.sub(r" .*", "", line.strip()))
+
+# Sort codenames alphabetically
+codenames.sort()
+
+# Create list of devices in cve tracker
+cve_json_file = os.path.join(mydir, repo["cve"] + "/kernels.json")
+with open(cve_json_file) as f:
+ json_file = json.load(f)
+
+for kernel in json_file:
+ for device in json_file[kernel]:
+ device = re.sub(r"android_device_[a-zA-Z0-9]*_", "", device)
+ cve_entries.append(device)
+
+# CVE tracker checking
+for codename in codenames:
+ if codename not in cve_entries:
+ print("{} doesn't have an entry in the CVE tracker".format(codename))
+
+# Create list of updater pages
+updater_json_file = os.path.join(mydir, repo["updater"] + "/devices.json")
+with open(updater_json_file) as f:
+ json_file = json.load(f)
+for device in json_file:
+ updater_pages.append(device["model"])
+
+# Wiki checking
+for codename in codenames:
+ wiki_yml_file = os.path.join(mydir, repo["wiki"] + "/_data/devices/" + codename + ".yml")
+ try:
+ with open(wiki_yml_file) as f:
+            yml = yaml.safe_load(f)
+ except FileNotFoundError:
+ print("{} doesn't have a wiki page".format(codename))
+ continue
+ try:
+ if not yml["maintainers"]:
+ print("{} doesn't have a maintainer listed".format(codename))
+ except KeyError:
+ print("{} doesn't have a maintainers field".format(codename))
+ try:
+ if not yml["install_method"]:
+ print("{} doesn't have an install method listed".format(codename))
+ elif "fastboot_generic" in yml["install_method"]:
+ print("{} uses fastboot_generic install method".format(codename))
+ elif "dd" in yml["install_method"]:
+ try:
+ if not yml["recovery_partition"]:
+ print("{} doesn't have a recovery partition listed".format(codename))
+ except KeyError:
+ print("{} doesn't have a recovery partition field".format(codename))
+ try:
+ if not yml["root_method"]:
+ print("{} doesn't have a root method listed".format(codename))
+ except KeyError:
+ print("{} doesn't have a root method field".format(codename))
+ except KeyError:
+ print("{} doesn't have an install method field".format(codename))
+
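+    # TWRP check: try the official dl.twrp.me page first (honoring
+    # custom_twrp_codename), then fall back to custom_twrp_link if present.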
+ try:
+ if "custom_twrp_codename" in yml:
+ twrp_url = "https://dl.twrp.me/" + yml["custom_twrp_codename"]
+ else:
+ twrp_url = "https://dl.twrp.me/" + codename
+
+ conn = urllib.request.urlopen(twrp_url)
+ except urllib.error.HTTPError:
+ if "custom_twrp_link" in yml:
+ try:
+ conn = urllib.request.urlopen(yml["custom_twrp_link"])
+ except urllib.error.HTTPError:
+ print("{} has an invalid custom twrp link value".format(codename))
+ else:
+ print("{} doesn't have official twrp or a custom twrp link field".format(codename))
+ else:
+ if "custom_twrp_link" in yml:
+ print("{} has both official twrp and a custom twrp link listed".format(codename))
+
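+# Flag wiki device pages whose codename isn't in hudson, unless the page
+# marks the device as discontinued.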
+wiki_yml_dir = os.path.join(mydir, repo["wiki"] + "/_data/devices")
+for wiki_yml in os.listdir(wiki_yml_dir):
+ codename = re.sub(r"\.yml", "", wiki_yml.strip())
+ if codename not in codenames:
+ wiki_yml_file = os.path.join(mydir, repo["wiki"] + "/_data/devices/" + wiki_yml)
+ with open(wiki_yml_file) as f:
+            yml = yaml.safe_load(f)
+ if "discontinued" not in yml["channels"]:
+ print("{} has a wiki page but isn't in hudson".format(codename))
+ else:
+ discontinued_devices.append(codename)
+
+# Updater checking
+for codename in codenames:
+ if codename not in updater_pages:
+ print("{} doesn't have an updater page".format(codename))
+
+for codename in updater_pages:
+ if codename not in codenames and codename not in discontinued_devices:
+ print("{} has an updater page but is not in hudson".format(codename))
+
+# Optionally print out all maintainer info
+if args.maintainers:
+ print("---------------MAINTAINER INFO DUMP---------------")
+ for codename in codenames:
+ wiki_yml_file = os.path.join(mydir, repo["wiki"] + "/_data/devices/" + codename + ".yml")
+ toprint = "{}:".format(codename)
+ try:
+ with open(wiki_yml_file) as f:
+                yml = yaml.safe_load(f)
+ except FileNotFoundError:
+ # Skip devices without wiki pages, we already errored about it
+ continue
+ try:
+ for maintainer in yml["maintainers"]:
+ toprint += ", {}".format(maintainer)
+ except KeyError:
+ # Skip devices without maintainer fields, we already errored about it
+ continue
+ print(toprint.replace(":,", ":"))
+
+if args.jira_file:
+ with open(args.jira_file) as f:
+ for line in f:
+ jira_devs.append(line.strip())
+ for codename in codenames:
+ wiki_yml_file = os.path.join(mydir, repo["wiki"] + "/_data/devices/" + codename + ".yml")
+ try:
+ with open(wiki_yml_file) as f:
+                yml = yaml.safe_load(f)
+ except FileNotFoundError:
+ # Skip devices without wiki pages, we already errored about it
+ continue
+ try:
+ for maintainer in yml["maintainers"]:
+ if maintainer not in jira_devs:
+ print("{} is listed as a maintainer for {} but doesn't have a jira developer account".format(maintainer, codename))
+ except KeyError:
+ # Skip devices without maintainer fields, we already errored about it
+ continue
diff --git a/maintainer-checks/requirements.txt b/maintainer-checks/requirements.txt
new file mode 100644
index 0000000..c3726e8
--- /dev/null
+++ b/maintainer-checks/requirements.txt
@@ -0,0 +1 @@
+pyyaml
diff --git a/extract.py b/update-payload-extractor/extract.py
index 266ef0a..266ef0a 100755
--- a/extract.py
+++ b/update-payload-extractor/extract.py
diff --git a/update_payload/__init__.py b/update-payload-extractor/update_payload/__init__.py
index 8ee95e2..8ee95e2 100644
--- a/update_payload/__init__.py
+++ b/update-payload-extractor/update_payload/__init__.py
diff --git a/update_payload/applier.py b/update-payload-extractor/update_payload/applier.py
index 9582b3d..9582b3d 100644
--- a/update_payload/applier.py
+++ b/update-payload-extractor/update_payload/applier.py
diff --git a/update_payload/checker.py b/update-payload-extractor/update_payload/checker.py
index e241b0b..e241b0b 100644
--- a/update_payload/checker.py
+++ b/update-payload-extractor/update_payload/checker.py
diff --git a/update_payload/checker_unittest.py b/update-payload-extractor/update_payload/checker_unittest.py
index f718234..f718234 100755
--- a/update_payload/checker_unittest.py
+++ b/update-payload-extractor/update_payload/checker_unittest.py
diff --git a/update_payload/common.py b/update-payload-extractor/update_payload/common.py
index 4e7b2e3..4e7b2e3 100644
--- a/update_payload/common.py
+++ b/update-payload-extractor/update_payload/common.py
diff --git a/update_payload/error.py b/update-payload-extractor/update_payload/error.py
index 6f95433..6f95433 100644
--- a/update_payload/error.py
+++ b/update-payload-extractor/update_payload/error.py
diff --git a/update_payload/format_utils.py b/update-payload-extractor/update_payload/format_utils.py
index 6248ba9..6248ba9 100644
--- a/update_payload/format_utils.py
+++ b/update-payload-extractor/update_payload/format_utils.py
diff --git a/update_payload/format_utils_unittest.py b/update-payload-extractor/update_payload/format_utils_unittest.py
index 42ea621..42ea621 100755
--- a/update_payload/format_utils_unittest.py
+++ b/update-payload-extractor/update_payload/format_utils_unittest.py
diff --git a/update_payload/histogram.py b/update-payload-extractor/update_payload/histogram.py
index 1ac2ab5..1ac2ab5 100644
--- a/update_payload/histogram.py
+++ b/update-payload-extractor/update_payload/histogram.py
diff --git a/update_payload/histogram_unittest.py b/update-payload-extractor/update_payload/histogram_unittest.py
index e757dd0..e757dd0 100755
--- a/update_payload/histogram_unittest.py
+++ b/update-payload-extractor/update_payload/histogram_unittest.py
diff --git a/update_payload/payload-test-key.pem b/update-payload-extractor/update_payload/payload-test-key.pem
index 342e923..342e923 100644
--- a/update_payload/payload-test-key.pem
+++ b/update-payload-extractor/update_payload/payload-test-key.pem
diff --git a/update_payload/payload-test-key.pub b/update-payload-extractor/update_payload/payload-test-key.pub
index fdae963..fdae963 100644
--- a/update_payload/payload-test-key.pub
+++ b/update-payload-extractor/update_payload/payload-test-key.pub
diff --git a/update_payload/payload.py b/update-payload-extractor/update_payload/payload.py
index 380d6d0..380d6d0 100644
--- a/update_payload/payload.py
+++ b/update-payload-extractor/update_payload/payload.py
diff --git a/update_payload/test_utils.py b/update-payload-extractor/update_payload/test_utils.py
index 1e2259d..1e2259d 100644
--- a/update_payload/test_utils.py
+++ b/update-payload-extractor/update_payload/test_utils.py
diff --git a/update_payload/update-payload-key.pub.pem b/update-payload-extractor/update_payload/update-payload-key.pub.pem
index 7ac369f..7ac369f 100644
--- a/update_payload/update-payload-key.pub.pem
+++ b/update-payload-extractor/update_payload/update-payload-key.pub.pem
diff --git a/update_payload/update_metadata_pb2.py b/update-payload-extractor/update_payload/update_metadata_pb2.py
index 595f2f6..595f2f6 100644
--- a/update_payload/update_metadata_pb2.py
+++ b/update-payload-extractor/update_payload/update_metadata_pb2.py