From 3678b290359b618b24c9a2e8d6abdcf6071dea2d Mon Sep 17 00:00:00 2001
From: Hansong Zhang
Date: Wed, 14 Jan 2026 13:37:10 -0800
Subject: [PATCH 1/6] test

---
 .github/workflows/export-stories-model.yml | 77 ++++++++++++++++++++++
 1 file changed, 77 insertions(+)
 create mode 100644 .github/workflows/export-stories-model.yml

diff --git a/.github/workflows/export-stories-model.yml b/.github/workflows/export-stories-model.yml
new file mode 100644
index 00000000000..487395e7bf2
--- /dev/null
+++ b/.github/workflows/export-stories-model.yml
@@ -0,0 +1,77 @@
+name: Export Stories Model
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 10 * * *'
+
+jobs:
+  export-stories-model:
+    name: export-stories-model
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    permissions:
+      id-token: write
+      contents: read
+    steps:
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+
+      - name: Install executorch
+        run: |
+          pip install executorch
+
+      - name: Download model and tokenizer
+        run: |
+          temp_pt="$(mktemp).pt"
+          temp_params="$(mktemp).json"
+
+          wget -O "$temp_pt" "https://huggingface.co/karpathy/tinyllamas/resolve/main/stories110M.pt"
+          wget "https://raw.githubusercontent.com/karpathy/llama2.c/master/tokenizer.model"
+          echo '{"dim": 768, "multiple_of": 32, "n_heads": 12, "n_layers": 12, "norm_eps": 1e-05, "vocab_size": 32000}' > "$temp_params"
+
+          echo "TEMP_PT=$temp_pt" >> $GITHUB_ENV
+          echo "TEMP_PARAMS=$temp_params" >> $GITHUB_ENV
+
+      - name: Export model to PTE
+        run: |
+          python -m executorch.extension.llm.export.export_llm \
+            base.checkpoint="$TEMP_PT" \
+            base.params="$TEMP_PARAMS" \
+            backend.xnnpack.enabled=True \
+            model.use_kv_cache=True \
+            export.output_name=stories110M.pte
+
+      - name: Cleanup temp files
+        run: |
+          rm "$TEMP_PT" "$TEMP_PARAMS"
+
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: stories110M-pte
+          path: |
+            stories110M.pte
+            tokenizer.model
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1.7.0
+        with:
+          role-to-assume: arn:aws:iam::308535385114:role/gha_executorch_upload-frameworks-android
+          aws-region: us-east-1
+
+      - name: Upload to S3
+        run: |
+          pip install awscli==1.32.18
+
+          shasum -a 256 stories110M.pte > stories110M.pte.sha256sums
+          shasum -a 256 tokenizer.model > tokenizer.model.sha256sums
+
+          VERSION="snapshot-$(date +"%Y%m%d")"
+
+          aws s3 cp stories110M.pte s3://ossci-android/executorch/stories/${VERSION}/stories110M.pte --acl public-read
+          aws s3 cp stories110M.pte.sha256sums s3://ossci-android/executorch/stories/${VERSION}/stories110M.pte.sha256sums --acl public-read
+          aws s3 cp tokenizer.model s3://ossci-android/executorch/stories/${VERSION}/tokenizer.model --acl public-read
+          aws s3 cp tokenizer.model.sha256sums s3://ossci-android/executorch/stories/${VERSION}/tokenizer.model.sha256sums --acl public-read

From 23b99d65b128a877fe6d9c291f711590cb375b71 Mon Sep 17 00:00:00 2001
From: Hansong Zhang
Date: Wed, 14 Jan 2026 13:38:05 -0800
Subject: [PATCH 2/6] test

---
 .github/workflows/export-stories-model.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/export-stories-model.yml b/.github/workflows/export-stories-model.yml
index 487395e7bf2..5b7d1f83ffb 100644
--- a/.github/workflows/export-stories-model.yml
+++ b/.github/workflows/export-stories-model.yml
@@ -1,6 +1,7 @@
 name: Export Stories Model
 
 on:
+  pull_request:
   workflow_dispatch:
   schedule:
     - cron: '0 10 * * *'

From f8ae34a4c63ba0d4ac15ee120b678e12efad755f Mon Sep 17 00:00:00 2001
From: Hansong Zhang
Date: Wed, 14 Jan 2026 13:45:39 -0800
Subject: [PATCH 3/6] try this

---
 .github/workflows/export-stories-model.yml | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/export-stories-model.yml b/.github/workflows/export-stories-model.yml
index 5b7d1f83ffb..4d3983df034 100644
--- a/.github/workflows/export-stories-model.yml
+++ b/.github/workflows/export-stories-model.yml
@@ -20,9 +20,16 @@
         with:
           python-version: '3.11'
 
+      - name: Checkout executorch
+        uses: actions/checkout@v4
+        with:
+          repository: pytorch/executorch
+          submodules: recursive
+
       - name: Install executorch
         run: |
-          pip install executorch
+          ./install_executorch.sh
+          ./examples/models/llama/install_requirements.sh
 
       - name: Download model and tokenizer
         run: |

From 66631c1c3ef2318638aa8ba722fe43488941dbc9 Mon Sep 17 00:00:00 2001
From: Hansong Zhang <107070759+kirklandsign@users.noreply.github.com>
Date: Fri, 16 Jan 2026 01:10:05 -0800
Subject: [PATCH 4/6] Rename and update export model workflow

---
 .github/workflows/export-stories-model.yml | 57 ++++++++++------------
 1 file changed, 25 insertions(+), 32 deletions(-)

diff --git a/.github/workflows/export-stories-model.yml b/.github/workflows/export-stories-model.yml
index 4d3983df034..88c1ee17876 100644
--- a/.github/workflows/export-stories-model.yml
+++ b/.github/workflows/export-stories-model.yml
@@ -7,8 +7,8 @@
     - cron: '0 10 * * *'
 
 jobs:
-  export-stories-model:
-    name: export-stories-model
+  export-dl3-model:
+    name: export-dl3-model
     runs-on: ubuntu-latest
     timeout-minutes: 30
     permissions:
@@ -28,29 +28,29 @@
 
       - name: Install executorch
         run: |
-          ./install_executorch.sh
-          ./examples/models/llama/install_requirements.sh
-
-      - name: Download model and tokenizer
-        run: |
-          temp_pt="$(mktemp).pt"
-          temp_params="$(mktemp).json"
-
-          wget -O "$temp_pt" "https://huggingface.co/karpathy/tinyllamas/resolve/main/stories110M.pt"
-          wget "https://raw.githubusercontent.com/karpathy/llama2.c/master/tokenizer.model"
-          echo '{"dim": 768, "multiple_of": 32, "n_heads": 12, "n_layers": 12, "norm_eps": 1e-05, "vocab_size": 32000}' > "$temp_params"
-
-          echo "TEMP_PT=$temp_pt" >> $GITHUB_ENV
-          echo "TEMP_PARAMS=$temp_params" >> $GITHUB_ENV
+          pip install torchvision executorch
 
      - name: Export model to PTE
         run: |
-          python -m executorch.extension.llm.export.export_llm \
-            base.checkpoint="$TEMP_PT" \
-            base.params="$TEMP_PARAMS" \
-            backend.xnnpack.enabled=True \
-            model.use_kv_cache=True \
-            export.output_name=stories110M.pte
+          cat > export_dl3.py << EOF
+          import torch
+          import torchvision.models as models
+          from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
+          from executorch.exir import to_edge_transform_and_lower
+          def main() -> None:
+              model = models.segmentation.deeplabv3_resnet101(weights='DEFAULT').eval()
+              sample_inputs = (torch.randn(1, 3, 224, 224), )
+              et_program = to_edge_transform_and_lower(
+                  torch.export.export(model, sample_inputs),
+                  partitioner=[XnnpackPartitioner()],
+              ).to_executorch()
+              with open("dl3_xnnpack_fp32.pte", "wb") as file:
+                  et_program.write_to_file(file)
+          if __name__ == "__main__":
+              main()
+          EOF
+
+          python export_dl3.py
 
       - name: Cleanup temp files
         run: |
@@ -59,10 +59,9 @@
       - name: Upload artifact
         uses: actions/upload-artifact@v4
         with:
-          name: stories110M-pte
+          name: dl3-pte
           path: |
-            stories110M.pte
-            tokenizer.model
+            dl3_xnnpack_fp32.pte
 
       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v1.7.0
         with:
           role-to-assume: arn:aws:iam::308535385114:role/gha_executorch_upload-frameworks-android
           aws-region: us-east-1
 
       - name: Upload to S3
         run: |
           pip install awscli==1.32.18
 
-          shasum -a 256 stories110M.pte > stories110M.pte.sha256sums
-          shasum -a 256 tokenizer.model > tokenizer.model.sha256sums
-
           VERSION="snapshot-$(date +"%Y%m%d")"
 
-          aws s3 cp stories110M.pte s3://ossci-android/executorch/stories/${VERSION}/stories110M.pte --acl public-read
-          aws s3 cp stories110M.pte.sha256sums s3://ossci-android/executorch/stories/${VERSION}/stories110M.pte.sha256sums --acl public-read
-          aws s3 cp tokenizer.model s3://ossci-android/executorch/stories/${VERSION}/tokenizer.model --acl public-read
-          aws s3 cp tokenizer.model.sha256sums s3://ossci-android/executorch/stories/${VERSION}/tokenizer.model.sha256sums --acl public-read
+          aws s3 cp dl3_xnnpack_fp32.pte s3://ossci-android/executorch/models/${VERSION}/dl3_xnnpack_fp32.pte --acl public-read

From 96ac62182ec808fe7ba592516fd23fa0921a34eb Mon Sep 17 00:00:00 2001
From: Hansong Zhang <107070759+kirklandsign@users.noreply.github.com>
Date: Fri, 16 Jan 2026 01:11:04 -0800
Subject: [PATCH 5/6] Remove executorch checkout step from workflow

Removed checkout step for executorch from workflow.
---
 .github/workflows/export-stories-model.yml | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/.github/workflows/export-stories-model.yml b/.github/workflows/export-stories-model.yml
index 88c1ee17876..b784d2ee652 100644
--- a/.github/workflows/export-stories-model.yml
+++ b/.github/workflows/export-stories-model.yml
@@ -20,12 +20,6 @@
         with:
           python-version: '3.11'
 
-      - name: Checkout executorch
-        uses: actions/checkout@v4
-        with:
-          repository: pytorch/executorch
-          submodules: recursive
-
       - name: Install executorch
         run: |
           pip install torchvision executorch

From f0c0b4d28ade5d21bce0371a5c8b0d3870d7dc1c Mon Sep 17 00:00:00 2001
From: Hansong Zhang <107070759+kirklandsign@users.noreply.github.com>
Date: Fri, 16 Jan 2026 01:15:28 -0800
Subject: [PATCH 6/6] Remove cleanup step for temp files in export workflow

Removed cleanup step for temporary files in workflow.
---
 .github/workflows/export-stories-model.yml | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/.github/workflows/export-stories-model.yml b/.github/workflows/export-stories-model.yml
index b784d2ee652..2507487afa0 100644
--- a/.github/workflows/export-stories-model.yml
+++ b/.github/workflows/export-stories-model.yml
@@ -46,10 +46,6 @@
 
           python export_dl3.py
 
-      - name: Cleanup temp files
-        run: |
-          rm "$TEMP_PT" "$TEMP_PARAMS"
-
       - name: Upload artifact
         uses: actions/upload-artifact@v4
         with: