Compare commits
114 commits
v3.7.0...v3.0-branch
| SHA1 |
|---|
| bd295942f2 |
| 3067b71139 |
| ca3f0afcde |
| 5359920017 |
| a1ec9a6849 |
| a203e1f9f0 |
| 8d4f91ac68 |
| ed97e76426 |
| 350ddd8eed |
| 4927fb741a |
| fea968e875 |
| f18ffe73b9 |
| c6fd79616e |
| 90eecd0729 |
| 86f1c9cb5a |
| fae0571170 |
| 4d90bc75b2 |
| 1a3c79f62c |
| 1c865de9f1 |
| bccccb4cc5 |
| dfa98f370b |
| db747083ef |
| 22aa0a2aae |
| 8d0655669a |
| 41a019a8bb |
| e7431464d6 |
| aa254060c8 |
| 93c0a95a9b |
| 9482c36196 |
| f2ad3dece2 |
| f455638b95 |
| 40ee7deedb |
| c856453e99 |
| 5d64424c22 |
| a6717210db |
| 8bb8a65914 |
| 536c0f3c2e |
| ed0126a207 |
| a759d6467e |
| ad5d399ac9 |
| fa6c695f87 |
| 0da80a27ef |
| a0b1163ca0 |
| 9779183ce2 |
| 165a1a05ac |
| d769c96b03 |
| 87f25a0690 |
| c52ec33d6c |
| 9517320e39 |
| 63d06fde33 |
| 2c24cbcadf |
| 990e167eba |
| 548ec52333 |
| bcf5fd6fcd |
| fc2748e737 |
| 8d88008c0f |
| 9a119f9933 |
| 94af6498bf |
| b9653482ab |
| eb67b12e25 |
| 8872dfb904 |
| d85fb925c4 |
| 4bdf630b27 |
| a3d6ba0b54 |
| 7e3d6fa61e |
| 80e5aa2f6f |
| 95ab73ff01 |
| 4bf22baead |
| f2a78fdab8 |
| 55d9be1050 |
| 74e647d249 |
| 1be46fb12c |
| 2aca845d35 |
| 3f447c0a26 |
| c454bc9f4d |
| f05315b402 |
| 9b4cceed52 |
| 575ad772e6 |
| 8882136543 |
| c9d3457d21 |
| 4b8d6ea4ee |
| fcb3d9dd05 |
| 4b9d483e49 |
| 7072cac593 |
| 814ee2c0f7 |
| 699e0ad31a |
| 19be597bb9 |
| 2cd00bb660 |
| 1d270ea28a |
| ce3888d367 |
| a1c391c744 |
| 373e7399e7 |
| 0b0a52a9eb |
| 1eb2a1f38f |
| 9fd3a8e37c |
| bc351fcbe6 |
| 445ba536dd |
| 51d1f1e01a |
| 58878206ab |
| 96581fe9a6 |
| d8d13f35c0 |
| 05067d2afc |
| f51daad0d3 |
| 30983c0f23 |
| d6722c6d99 |
| cda74b9a92 |
| c1a8af9fa9 |
| e139e4467a |
| ff51f24f92 |
| 974f452706 |
| ee8995cb72 |
| e29e7ca706 |
| 00f8c8156c |
| 72893d1f6b |
.github/workflows/backport.yml  (2 changes, vendored)

@@ -9,7 +9,7 @@ on:
 jobs:
   backport:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     name: Backport
     steps:
       - name: Backport
.github/workflows/backport_issue_check.yml  (30 additions, vendored, new file)

@@ -0,0 +1,30 @@
+name: Backport Issue Check
+
+on:
+  pull_request_target:
+    branches:
+      - v*-branch
+
+jobs:
+  backport:
+    name: Backport Issue Check
+    runs-on: ubuntu-22.04
+
+    steps:
+      - name: Check out source code
+        uses: actions/checkout@v3
+
+      - name: Install Python dependencies
+        run: |
+          sudo pip3 install -U setuptools wheel pip
+          pip3 install -U pygithub
+
+      - name: Run backport issue checker
+        env:
+          GITHUB_TOKEN: ${{ secrets.ZB_GITHUB_TOKEN }}
+        run: |
+          ./scripts/release/list_backports.py \
+            -o ${{ github.event.repository.owner.login }} \
+            -r ${{ github.event.repository.name }} \
+            -b ${{ github.event.pull_request.base.ref }} \
+            -p ${{ github.event.pull_request.number }}
.github/workflows/bluetooth-tests-publish.yaml  (29 additions, vendored, new file)

@@ -0,0 +1,29 @@
+name: Publish Bluetooth Tests Results
+
+on:
+  workflow_run:
+    workflows: ["Bluetooth Tests"]
+    types:
+      - completed
+jobs:
+  bluetooth-test-results:
+    name: "Publish Bluetooth Test Results"
+    runs-on: ubuntu-20.04
+    if: github.event.workflow_run.conclusion != 'skipped'
+
+    steps:
+      - name: Download artifacts
+        uses: dawidd6/action-download-artifact@v2
+        with:
+          workflow: bluetooth-tests.yaml
+          run_id: ${{ github.event.workflow_run.id }}
+
+      - name: Publish Bluetooth Test Results
+        uses: EnricoMi/publish-unit-test-result-action@v1
+        with:
+          check_name: Bluetooth Test Results
+          comment_mode: off
+          commit: ${{ github.event.workflow_run.head_sha }}
+          event_file: event/event.json
+          event_name: ${{ github.event.workflow_run.event }}
+          files: "bluetooth-test-results/**/bsim_results.xml"
.github/workflows/bluetooth-tests.yaml  (vendored)

@@ -1,7 +1,7 @@
 name: Bluetooth Tests

 on:
-  pull_request_target:
+  pull_request:
     paths:
       - "west.yml"
       - "subsys/bluetooth/**"
@@ -10,17 +10,13 @@ on:
       - "soc/posix/**"
       - "arch/posix/**"

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  bluetooth-test-prep:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
-  bluetooth-test-build:
-    runs-on: ubuntu-latest
-    needs: bluetooth-test-prep
+  bluetooth-test:
+    runs-on: ubuntu-20.04
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
@@ -38,10 +34,7 @@ jobs:
           echo "$HOME/.local/bin" >> $GITHUB_PATH

       - name: checkout
-        uses: actions/checkout@v2
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-          fetch-depth: 0
+        uses: actions/checkout@v3

       - name: west setup
         run: |
@@ -58,28 +51,17 @@ jobs:

       - name: Upload Test Results
         if: always()
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
-          name: Bluetooth Test Results
-          path: ./bsim_bt_out/bsim_results.xml
+          name: bluetooth-test-results
+          path: |
+            ./bsim_bt_out/bsim_results.xml
+            ${{ github.event_path }}

-  bluetooth-test-results:
-    name: "Publish Bluetooth Test Results"
-    needs: bluetooth-test-build
-    runs-on: ubuntu-20.04
-    # the build-and-test job might be skipped, we don't need to run this job then
-    if: success() || failure()
-
-    steps:
-      - name: Download Artifacts
-        uses: actions/download-artifact@v2
-        with:
-          path: artifacts
-
-      - name: Publish Bluetooth Test Results
-        uses: EnricoMi/publish-unit-test-result-action@v1
-        with:
-          check_name: Bluetooth Test Results
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          files: "**/bsim_results.xml"
-          comment_mode: off
+      - name: Upload Event Details
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: event
+          path: |
+            ${{ github.event_path }}
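A pattern worth calling out: this diff and several that follow (clang.yaml, codecov.yaml, footprint-tracking.yml, footprint.yml, twister.yaml) drop a dedicated cancel job built on styfle/cancel-workflow-action and replace it with a top-level concurrency group. A minimal sketch of the replacement pattern, with illustrative workflow and job names:

    name: Example CI

    on: pull_request

    # One concurrency group per workflow, event type, and source branch;
    # starting a new run cancels the in-progress run for the same PR.
    concurrency:
      group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
      cancel-in-progress: true

    jobs:
      build:
        runs-on: ubuntu-20.04
        steps:
          - uses: actions/checkout@v3

Because the group key includes the head ref, cancellation is scoped to superseded runs of the same pull request, and no extra job (or runner allocation) is needed per run.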
.github/workflows/clang.yaml  (37 changes, vendored)

@@ -2,22 +2,18 @@ name: Build with Clang/LLVM

 on: pull_request_target

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  clang-build-prep:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   clang-build:
-    runs-on: zephyr_runner
-    needs: clang-build-prep
+    runs-on: zephyr-runner-linux-x64-4xlarge
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
       volumes:
-        - /home/runners/zephyrproject:/github/cache/zephyrproject
+        - /repo-cache/zephyrproject:/github/cache/zephyrproject
     strategy:
       fail-fast: false
       matrix:
@@ -30,15 +26,18 @@ jobs:
     outputs:
       report_needed: ${{ steps.twister.outputs.report_needed }}
     steps:
-      - name: Cleanup
+      - name: Clone cached Zephyr repository
+        continue-on-error: true
         run: |
-          # hotfix, until we have a better way to deal with existing data
-          rm -rf zephyr zephyr-testing
+          git clone --shared /github/cache/zephyrproject/zephyr .
+          git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}

       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
+          persist-credentials: false

       - name: Environment Setup
         run: |
@@ -72,7 +71,7 @@ jobs:
           string(TIMESTAMP current_date "%Y-%m-%d-%H;%M;%S" UTC)
           string(REPLACE "/" "_" repo ${{github.repository}})
           string(REPLACE "-" "_" repo2 ${repo})
-          message("::set-output name=repo::${repo2}")
+          file(APPEND $ENV{GITHUB_OUTPUT} "repo=${repo2}\n")
      - name: use cache
        id: cache-ccache
        uses: nashif/action-s3-cache@master
@@ -100,12 +99,12 @@ jobs:

           # We can limit scope to just what has changed
           if [ -s testplan.csv ]; then
-            echo "::set-output name=report_needed::1";
+            echo "report_needed=1" >> $GITHUB_OUTPUT
             # Full twister but with options based on changes
             ./scripts/twister --inline-logs -M -N -v --load-tests testplan.csv --retry-failed 2
           else
             # if nothing is run, skip reporting step
-            echo "::set-output name=report_needed::0";
+            echo "report_needed=0" >> $GITHUB_OUTPUT
           fi

       - name: ccache stats post
@@ -114,7 +113,7 @@ jobs:

       - name: Upload Unit Test Results
         if: always() && steps.twister.outputs.report_needed != 0
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: Unit Test Results (Subset ${{ matrix.platform }})
           path: twister-out/twister.xml
@@ -137,7 +136,7 @@ jobs:

       - name: Upload Unit Test Results in HTML
         if: always()
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: HTML Unit Test Results
           if-no-files-found: ignore
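The other recurring change in this and the following workflows is the migration from the deprecated ::set-output workflow command to the $GITHUB_OUTPUT file. A minimal sketch of the before/after, with illustrative step and output names:

    jobs:
      demo:
        runs-on: ubuntu-20.04
        steps:
          - id: twister
            run: |
              # Deprecated form, removed by these diffs:
              #   echo "::set-output name=report_needed::1"
              # Current form: append key=value to the file GitHub provides.
              echo "report_needed=1" >> "$GITHUB_OUTPUT"

          - name: Report
            if: steps.twister.outputs.report_needed != 0
            run: echo "a report is needed"

Steps that run CMake script rather than shell do the same thing with file(APPEND $ENV{GITHUB_OUTPUT} ...), as the repo-name steps in these diffs show.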
.github/workflows/codecov.yaml  (36 changes, vendored)

@@ -4,22 +4,18 @@ on:
   schedule:
     - cron: '25 */3 * * 1-5'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  codecov-prep:
-    runs-on: ubuntu-latest
-    if: github.repository == 'zephyrproject-rtos/zephyr'
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   codecov:
-    runs-on: zephyr_runner
-    needs: codecov-prep
+    runs-on: zephyr-runner-linux-x64-4xlarge
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
       volumes:
         - /repo-cache/zephyrproject:/github/cache/zephyrproject
     strategy:
       fail-fast: false
       matrix:
@@ -32,8 +28,14 @@ jobs:
         run: |
           echo "$HOME/.local/bin" >> $GITHUB_PATH

+      - name: Clone cached Zephyr repository
+        continue-on-error: true
+        run: |
+          git clone --shared /github/cache/zephyrproject/zephyr .
+          git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}
+
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
@@ -54,7 +56,7 @@ jobs:
         run: |
           string(REPLACE "/" "_" repo ${{github.repository}})
           string(REPLACE "-" "_" repo2 ${repo})
-          message("::set-output name=repo::${repo2}")
+          file(APPEND $ENV{GITHUB_OUTPUT} "repo=${repo2}\n")

       - name: use cache
         id: cache-ccache
@@ -94,7 +96,7 @@ jobs:

       - name: Upload Coverage Results
         if: always()
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: Coverage Data (Subset ${{ matrix.platform }})
           path: coverage/reports/${{ matrix.platform }}.info
@@ -108,7 +110,7 @@ jobs:

     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Download Artifacts
@@ -144,8 +146,8 @@ jobs:
             set(MERGELIST "${MERGELIST} -a ${f}")
           endif()
         endforeach()
-        message("::set-output name=mergefiles::${MERGELIST}")
-        message("::set-output name=covfiles::${FILELIST}")
+        file(APPEND $ENV{GITHUB_OUTPUT} "mergefiles=${MERGELIST}\n")
+        file(APPEND $ENV{GITHUB_OUTPUT} "covfiles=${FILELIST}\n")

       - name: Merge coverage files
         run: |
.github/workflows/coding_guidelines.yml  (6 changes, vendored)

@@ -4,17 +4,17 @@ on: pull_request

 jobs:
   compliance_job:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Run coding guidelines checks on patch series (PR)
     steps:
       - name: Checkout the code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0

       - name: cache-pip
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-doc-pip
.github/workflows/compliance.yml  (12 changes, vendored)

@@ -4,11 +4,11 @@ on: pull_request

 jobs:
   maintainer_check:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Check MAINTAINERS file
     steps:
       - name: Checkout the code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
@@ -20,7 +20,7 @@ jobs:
         python3 ./scripts/get_maintainer.py path CMakeLists.txt

   check_compliance:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Run compliance checks on patch series (PR)
     steps:
       - name: Update PATH for west
@@ -28,13 +28,13 @@ jobs:
         echo "$HOME/.local/bin" >> $GITHUB_PATH

       - name: Checkout the code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0

       - name: cache-pip
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-doc-pip
@@ -72,7 +72,7 @@ jobs:
         ./scripts/ci/check_compliance.py -m Codeowners -m Devicetree -m Gitlint -m Identity -m Nits -m pylint -m checkpatch -m Kconfig -c origin/${BASE_REF}..

       - name: upload-results
-        uses: actions/upload-artifact@master
+        uses: actions/upload-artifact@v3
         continue-on-error: True
         with:
           name: compliance.xml
.github/workflows/daily_test_version.yml  (4 changes, vendored)

@@ -12,7 +12,7 @@ on:

 jobs:
   get_version:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'

     steps:
@@ -28,7 +28,7 @@ jobs:
         pip3 install gitpython

       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
.github/workflows/devicetree_checks.yml  (22 changes, vendored)

@@ -6,10 +6,16 @@ name: Devicetree script tests

 on:
   push:
+    branches:
+      - main
+      - v*-branch
     paths:
       - 'scripts/dts/**'
       - '.github/workflows/devicetree_checks.yml'
   pull_request:
+    branches:
+      - main
+      - v*-branch
     paths:
       - 'scripts/dts/**'
       - '.github/workflows/devicetree_checks.yml'
@@ -21,22 +27,22 @@ jobs:
     strategy:
       matrix:
         python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-20.04, macos-11, windows-2022]
         exclude:
-          - os: macos-latest
+          - os: macos-11
            python-version: 3.6
-          - os: windows-latest
+          - os: windows-2022
            python-version: 3.6
     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: cache-pip-linux
         if: startsWith(runner.os, 'Linux')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-pip-${{ matrix.python-version }}
@@ -44,7 +50,7 @@ jobs:
           ${{ runner.os }}-pip-${{ matrix.python-version }}
       - name: cache-pip-mac
         if: startsWith(runner.os, 'macOS')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/Library/Caches/pip
           # Trailing '-' was just to get a different cache name
@@ -53,7 +59,7 @@ jobs:
           ${{ runner.os }}-pip-${{ matrix.python-version }}-
       - name: cache-pip-win
         if: startsWith(runner.os, 'Windows')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~\AppData\Local\pip\Cache
           key: ${{ runner.os }}-pip-${{ matrix.python-version }}
.github/workflows/doc-build.yml  (40 changes, vendored)

@@ -5,10 +5,10 @@ name: Documentation Build

 on:
   schedule:
-  - cron: '0 */3 * * *'
+    - cron: '0 */3 * * *'
   push:
     tags:
-  - v*
+    - v*
   pull_request:
     paths:
       - 'doc/**'
@@ -35,26 +35,27 @@ env:
 jobs:
   doc-build-html:
     name: "Documentation Build (HTML)"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     timeout-minutes: 30

     concurrency:
       group: doc-build-html-${{ github.ref }}
       cancel-in-progress: true

     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: install-pkgs
         run: |
           sudo apt-get update
           sudo apt-get install -y ninja-build graphviz libclang1-9 libclang-cpp9
-          wget --no-verbose https://www.doxygen.nl/files/doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz
+          wget --no-verbose https://downloads.sourceforge.net/project/doxygen/rel-${DOXYGEN_VERSION}/doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz
           tar xf doxygen-${DOXYGEN_VERSION}.linux.bin.tar.gz
           echo "${PWD}/doxygen-${DOXYGEN_VERSION}/bin" >> $GITHUB_PATH

       - name: cache-pip
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: pip-${{ hashFiles('scripts/requirements-doc.txt') }}
@@ -91,14 +92,31 @@ jobs:
           tar cfJ html-output.tar.xz --directory=doc/_build html

       - name: upload-build
-        uses: actions/upload-artifact@master
+        uses: actions/upload-artifact@v3
         with:
           name: html-output
           path: html-output.tar.xz

+      - name: process-pr
+        if: github.event_name == 'pull_request'
+        run: |
+          REPO_NAME="${{ github.event.repository.name }}"
+          PR_NUM="${{ github.event.pull_request.number }}"
+          DOC_URL="https://builds.zephyrproject.io/${REPO_NAME}/pr/${PR_NUM}/docs/"
+
+          echo "${PR_NUM}" > pr_num
+          echo "::notice:: Documentation will be available shortly at: ${DOC_URL}"
+
+      - name: upload-pr-number
+        uses: actions/upload-artifact@v3
+        if: github.event_name == 'pull_request'
+        with:
+          name: pr_num
+          path: pr_num
+
   doc-build-pdf:
     name: "Documentation Build (PDF)"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     container: texlive/texlive:latest
     timeout-minutes: 30
     concurrency:
@@ -107,7 +125,7 @@ jobs:

     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: install-pkgs
         run: |
@@ -115,7 +133,7 @@ jobs:
         apt-get install -y python3-pip ninja-build doxygen graphviz librsvg2-bin

       - name: cache-pip
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: pip-${{ hashFiles('scripts/requirements-doc.txt') }}
@@ -142,7 +160,7 @@ jobs:
         DOC_TAG=${DOC_TAG} SPHINXOPTS="-q -j auto" LATEXMKOPTS="-quiet -halt-on-error" make -C doc pdf

       - name: upload-build
-        uses: actions/upload-artifact@master
+        uses: actions/upload-artifact@v3
         with:
           name: pdf-output
           path: doc/_build/latex/zephyr.pdf
.github/workflows/doc-publish-pr.yml  (63 additions, vendored, new file)

@@ -0,0 +1,63 @@
+# Copyright (c) 2020 Linaro Limited.
+# Copyright (c) 2021 Nordic Semiconductor ASA
+# SPDX-License-Identifier: Apache-2.0
+
+name: Documentation Publish (Pull Request)
+
+on:
+  workflow_run:
+    workflows: ["Documentation Build"]
+    types:
+      - completed
+
+jobs:
+  doc-publish:
+    name: Publish Documentation
+    runs-on: ubuntu-20.04
+    if: |
+      github.event.workflow_run.event == 'pull_request' &&
+      github.event.workflow_run.conclusion == 'success' &&
+      github.repository == 'zephyrproject-rtos/zephyr'
+
+    steps:
+      - name: Download artifacts
+        uses: dawidd6/action-download-artifact@v2
+        with:
+          workflow: doc-build.yml
+          run_id: ${{ github.event.workflow_run.id }}
+
+      - name: Load PR number
+        run: |
+          echo "PR_NUM=$(<pr_num/pr_num)" >> $GITHUB_ENV
+
+      - name: Check PR number
+        id: check-pr
+        uses: carpentries/actions/check-valid-pr@v0.8
+        with:
+          pr: ${{ env.PR_NUM }}
+          sha: ${{ github.event.workflow_run.head_sha }}
+
+      - name: Validate PR number
+        if: steps.check-pr.outputs.VALID != 'true'
+        run: |
+          echo "ABORT: PR number validation failed!"
+          exit 1
+
+      - name: Uncompress HTML docs
+        run: |
+          tar xf html-output/html-output.tar.xz -C html-output
+
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_BUILDS_ZEPHYR_PR_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_BUILDS_ZEPHYR_PR_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+
+      - name: Upload to AWS S3
+        env:
+          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch }}
+        run: |
+          aws s3 sync --quiet html-output/html \
+            s3://builds.zephyrproject.org/${{ github.event.repository.name }}/pr/${PR_NUM}/docs \
+            --delete
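Taken together, doc-build.yml and this new file form a fork-safe publishing pipeline: the unprivileged build run uploads the PR number as an artifact, and this privileged workflow_run consumer downloads, validates, and acts on it. A stripped-down sketch of the consumer side, with illustrative workflow and artifact names:

    name: Example Publish

    on:
      workflow_run:
        workflows: ["Example Build"]   # must match the producer's `name:`
        types:
          - completed

    jobs:
      publish:
        runs-on: ubuntu-20.04
        if: github.event.workflow_run.conclusion == 'success'
        steps:
          # The artifacts belong to the producer's run, which the stock
          # actions/download-artifact cannot reach, so these diffs use a
          # third-party action that accepts an explicit run_id.
          - uses: dawidd6/action-download-artifact@v2
            with:
              workflow: example-build.yml
              run_id: ${{ github.event.workflow_run.id }}

          - name: Load PR number
            run: echo "PR_NUM=$(<pr_num/pr_num)" >> $GITHUB_ENV

The point of the split is that the build triggered from a fork never sees repository secrets; only the consumer, which validates the PR number against the head SHA, holds credentials.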
.github/workflows/doc-publish.yml  (12 changes, vendored)

@@ -2,23 +2,21 @@
 # Copyright (c) 2021 Nordic Semiconductor ASA
 # SPDX-License-Identifier: Apache-2.0

-name: Publish Documentation
+name: Documentation Publish

 on:
   workflow_run:
     workflows: ["Documentation Build"]
     branches:
-    - main
-    - v*
-    tags:
-    - v*
+      - main
+      - v*
     types:
-    - completed
+      - completed

 jobs:
   doc-publish:
     name: Publish Documentation
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: |
       github.event.workflow_run.conclusion == 'success' &&
       github.repository == 'zephyrproject-rtos/zephyr'
.github/workflows/errno.yml  (4 changes, vendored)

@@ -6,13 +6,13 @@ on:

 jobs:
   check-errno:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     container:
       image: zephyrprojectrtos/ci:v0.21.0

     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

       - name: Run errno.py
         run: |
.github/workflows/footprint-tracking.yml  (17 changes, vendored)

@@ -13,19 +13,14 @@ on:
       # same commit
       - 'v*'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  footprint-tracking-cancel:
-    runs-on: ubuntu-latest
-    if: github.repository == 'zephyrproject-rtos/zephyr'
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   footprint-tracking:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'
-    needs: footprint-tracking-cancel
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
@@ -44,7 +39,7 @@ jobs:
         sudo pip3 install -U setuptools wheel pip gitpython

       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
.github/workflows/footprint.yml  (21 changes, vendored)

@@ -2,19 +2,14 @@ name: Footprint Delta

 on: pull_request

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  footprint-cancel:
-    runs-on: ubuntu-latest
-    if: github.repository == 'zephyrproject-rtos/zephyr'
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
   footprint-delta:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'
-    needs: footprint-cancel
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
@@ -25,16 +20,12 @@ jobs:
       CLANG_ROOT_DIR: /usr/lib/llvm-12
       ZEPHYR_TOOLCHAIN_VARIANT: zephyr
     steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
       - name: Update PATH for west
         run: |
           echo "$HOME/.local/bin" >> $GITHUB_PATH

       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
.github/workflows/issue_count.yml  (6 changes, vendored)

@@ -14,13 +14,13 @@ env:
 jobs:
   track-issues:
     name: "Collect Issue Stats"
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'

     steps:
       - name: Download configuration file
         run: |
-          wget -q https://raw.githubusercontent.com/$GITHUB_REPOSITORY/master/.github/workflows/issues-report-config.json
+          wget -q https://raw.githubusercontent.com/$GITHUB_REPOSITORY/main/.github/workflows/issues-report-config.json

       - name: install-packages
         run: |
@@ -35,7 +35,7 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}

       - name: upload-stats
-        uses: actions/upload-artifact@master
+        uses: actions/upload-artifact@v3
         continue-on-error: True
         with:
           name: ${{ env.OUTPUT_FILE_NAME }}
.github/workflows/license_check.yml  (4 changes, vendored)

@@ -4,7 +4,7 @@ on: [pull_request]

 jobs:
   scancode_job:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     name: Scan code for licenses
     steps:
       - name: Checkout the code
@@ -15,7 +15,7 @@ jobs:
         with:
           directory-to-scan: 'scan/'
       - name: Artifact Upload
-        uses: actions/upload-artifact@v1
+        uses: actions/upload-artifact@v3
         with:
           name: scancode
           path: ./artifacts
.github/workflows/manifest.yml  (5 changes, vendored)

@@ -6,15 +6,16 @@ on:

 jobs:
   contribs:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     name: Manifest
     steps:
       - name: Checkout the code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           path: zephyrproject/zephyr
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
+          persist-credentials: false

       - name: Manifest
         uses: zephyrproject-rtos/action-manifest@2f1ad2908599d4fe747f886f9d733dd7eebae4ef
.github/workflows/release.yml  (9 changes, vendored)

@@ -7,15 +7,16 @@ on:

 jobs:
   release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0

       - name: Get the version
         id: get_version
-        run: echo ::set-output name=VERSION::${GITHUB_REF#refs/tags/}
+        run: |
+          echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT

       - name: REUSE Compliance Check
         uses: fsfe/reuse-action@v1
@@ -23,7 +24,7 @@ jobs:
           args: spdx -o zephyr-${{ steps.get_version.outputs.VERSION }}.spdx

       - name: upload-results
-        uses: actions/upload-artifact@master
+        uses: actions/upload-artifact@v3
         continue-on-error: True
         with:
           name: zephyr-${{ steps.get_version.outputs.VERSION }}.spdx
.github/workflows/stale_issue.yml  (2 changes, vendored)

@@ -6,7 +6,7 @@ on:
 jobs:
   stale:
     name: Find Stale issues and PRs
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     if: github.repository == 'zephyrproject-rtos/zephyr'
     steps:
       - uses: actions/stale@v3
.github/workflows/twister.yaml  (59 changes, vendored)

@@ -4,31 +4,27 @@ on:
   push:
     branches:
       - main
+      - v*-branch
   pull_request_target:
     branches:
       - main
+      - v*-branch
   schedule:
     # Run at 00:00 on Wednesday and Saturday
     - cron: '0 0 * * 3,6'

+concurrency:
+  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 jobs:
-  twister-build-cleanup:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel Previous Runs
-        uses: styfle/cancel-workflow-action@0.6.0
-        with:
-          access_token: ${{ github.token }}
-
   twister-build-prep:
-    runs-on: zephyr_runner
-    needs: twister-build-cleanup
+    runs-on: zephyr-runner-linux-x64-4xlarge
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
       volumes:
-        - /home/runners/zephyrproject:/github/cache/zephyrproject
+        - /repo-cache/zephyrproject:/github/cache/zephyrproject
     outputs:
       subset: ${{ steps.output-services.outputs.subset }}
       size: ${{ steps.output-services.outputs.size }}
@@ -43,17 +39,20 @@ jobs:
       COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
       BASE_REF: ${{ github.base_ref }}
     steps:
-      - name: Cleanup
+      - name: Clone cached Zephyr repository
         if: github.event_name == 'pull_request_target'
+        continue-on-error: true
         run: |
-          # hotfix, until we have a better way to deal with existing data
-          rm -rf zephyr zephyr-testing
+          git clone --shared /github/cache/zephyrproject/zephyr .
+          git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}

       - name: Checkout
         if: github.event_name == 'pull_request_target'
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
+          persist-credentials: false

       - name: Environment Setup
         if: github.event_name == 'pull_request_target'
@@ -100,19 +99,19 @@ jobs:
           else
             size=0
           fi
-          echo "::set-output name=subset::${subset}";
-          echo "::set-output name=size::${size}";
-          echo "::set-output name=fullrun::${TWISTER_FULL}";
+          echo "subset=${subset}" >> $GITHUB_OUTPUT
+          echo "size=${size}" >> $GITHUB_OUTPUT
+          echo "fullrun=${TWISTER_FULL}" >> $GITHUB_OUTPUT

   twister-build:
-    runs-on: zephyr_runner
+    runs-on: zephyr-runner-linux-x64-4xlarge
     needs: twister-build-prep
     if: needs.twister-build-prep.outputs.size != 0
     container:
       image: zephyrprojectrtos/ci:v0.21.0
       options: '--entrypoint /bin/bash'
       volumes:
-        - /home/runners/zephyrproject:/github/cache/zephyrproject
+        - /repo-cache/zephyrproject:/github/cache/zephyrproject
     strategy:
       fail-fast: false
       matrix:
@@ -127,16 +126,18 @@ jobs:
       COMMIT_RANGE: ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }}
       BASE_REF: ${{ github.base_ref }}
     steps:
-      - name: Cleanup
+      - name: Clone cached Zephyr repository
+        continue-on-error: true
         run: |
-          # hotfix, until we have a better way to deal with existing data
-          rm -rf zephyr zephyr-testing
+          git clone --shared /github/cache/zephyrproject/zephyr .
+          git remote set-url origin ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}

       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
+          persist-credentials: false

       - name: Environment Setup
         run: |
@@ -172,7 +173,7 @@ jobs:
           string(TIMESTAMP current_date "%Y-%m-%d-%H;%M;%S" UTC)
           string(REPLACE "/" "_" repo ${{github.repository}})
           string(REPLACE "-" "_" repo2 ${repo})
-          message("::set-output name=repo::${repo2}")
+          file(APPEND $ENV{GITHUB_OUTPUT} "repo=${repo2}\n")

       - name: use cache
         id: cache-ccache
@@ -237,7 +238,7 @@ jobs:

       - name: Upload Unit Test Results
         if: always()
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: Unit Test Results (Subset ${{ matrix.subset }})
           if-no-files-found: ignore
@@ -248,7 +249,7 @@ jobs:
   twister-test-results:
     name: "Publish Unit Tests Results"
     needs: twister-build
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     # the build-and-test job might be skipped, we don't need to run this job then
     if: success() || failure()

@@ -266,7 +267,7 @@ jobs:

       - name: Upload Unit Test Results in HTML
         if: always()
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         with:
           name: HTML Unit Test Results
           if-no-files-found: ignore
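The prep job's subset/size outputs drive the build matrix and let the whole build be skipped when the change set needs no tests. A reduced sketch of that producer/consumer wiring, assuming the subset output is a JSON array (the step names and values here are illustrative):

    jobs:
      prep:
        runs-on: ubuntu-20.04
        outputs:
          subset: ${{ steps.plan.outputs.subset }}
          size: ${{ steps.plan.outputs.size }}
        steps:
          - id: plan
            run: |
              echo 'subset=[1, 2, 3]' >> $GITHUB_OUTPUT
              echo 'size=3' >> $GITHUB_OUTPUT

      build:
        needs: prep
        # Skipped entirely when the prep job reports nothing to run.
        if: needs.prep.outputs.size != 0
        runs-on: ubuntu-20.04
        strategy:
          fail-fast: false
          matrix:
            subset: ${{ fromJSON(needs.prep.outputs.subset) }}
        steps:
          - run: echo "running subset ${{ matrix.subset }}"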
.github/workflows/twister_tests.yml  (14 changes, vendored)

@@ -5,12 +5,18 @@ name: Twister TestSuite

 on:
   push:
+    branches:
+      - main
+      - v*-branch
     paths:
       - 'scripts/pylib/twister/**'
       - 'scripts/twister'
       - 'scripts/tests/twister/**'
       - '.github/workflows/twister_tests.yml'
   pull_request:
+    branches:
+      - main
+      - v*-branch
     paths:
       - 'scripts/pylib/twister/**'
       - 'scripts/twister'
@@ -24,17 +30,17 @@ jobs:
     strategy:
       matrix:
         python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-latest]
+        os: [ubuntu-20.04]
     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: cache-pip-linux
         if: startsWith(runner.os, 'Linux')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-pip-${{ matrix.python-version }}
.github/workflows/west_cmds.yml  (22 changes, vendored)

@@ -5,11 +5,17 @@ name: Zephyr West Command Tests

 on:
   push:
+    branches:
+      - main
+      - v*-branch
     paths:
       - 'scripts/west-commands.yml'
       - 'scripts/west_commands/**'
       - '.github/workflows/west_cmds.yml'
   pull_request:
+    branches:
+      - main
+      - v*-branch
     paths:
       - 'scripts/west-commands.yml'
       - 'scripts/west_commands/**'
@@ -22,22 +28,22 @@ jobs:
     strategy:
       matrix:
         python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-20.04, macos-11, windows-2022]
         exclude:
-          - os: macos-latest
+          - os: macos-11
            python-version: 3.6
-          - os: windows-latest
+          - os: windows-2022
            python-version: 3.6
     steps:
       - name: checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python-version }}
       - name: cache-pip-linux
         if: startsWith(runner.os, 'Linux')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-pip-${{ matrix.python-version }}
@@ -45,7 +51,7 @@ jobs:
           ${{ runner.os }}-pip-${{ matrix.python-version }}
       - name: cache-pip-mac
         if: startsWith(runner.os, 'macOS')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~/Library/Caches/pip
           # Trailing '-' was just to get a different cache name
@@ -54,7 +60,7 @@ jobs:
           ${{ runner.os }}-pip-${{ matrix.python-version }}-
       - name: cache-pip-win
         if: startsWith(runner.os, 'Windows')
-        uses: actions/cache@v1
+        uses: actions/cache@v3
         with:
           path: ~\AppData\Local\pip\Cache
           key: ${{ runner.os }}-pip-${{ matrix.python-version }}
@@ -1417,6 +1417,17 @@ if(CONFIG_BUILD_OUTPUT_ADJUST_LMA)
   )
 endif()

+if(NOT CONFIG_EXCEPTIONS)
+  set(eh_frame_section ".eh_frame")
+else()
+  set(eh_frame_section "")
+endif()
+set(remove_sections_argument_list "")
+foreach(section .comment COMMON ${eh_frame_section})
+  list(APPEND remove_sections_argument_list
+       $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>${section})
+endforeach()
+
 if(CONFIG_BUILD_OUTPUT_HEX OR BOARD_FLASH_RUNNER STREQUAL openocd)
   get_property(elfconvert_formats TARGET bintools PROPERTY elfconvert_formats)
   if(ihex IN_LIST elfconvert_formats)
@@ -1426,9 +1437,7 @@ if(CONFIG_BUILD_OUTPUT_HEX OR BOARD_FLASH_RUNNER STREQUAL openocd)
     $<TARGET_PROPERTY:bintools,elfconvert_flag>
     ${GAP_FILL}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_outtarget>ihex
-    $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>.comment
-    $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>COMMON
-    $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>.eh_frame
+    ${remove_sections_argument_list}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_infile>${KERNEL_ELF_NAME}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_outfile>${KERNEL_HEX_NAME}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_final>
@@ -1450,9 +1459,7 @@ if(CONFIG_BUILD_OUTPUT_BIN)
     $<TARGET_PROPERTY:bintools,elfconvert_flag>
     ${GAP_FILL}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_outtarget>binary
-    $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>.comment
-    $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>COMMON
-    $<TARGET_PROPERTY:bintools,elfconvert_flag_section_remove>.eh_frame
+    ${remove_sections_argument_list}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_infile>${KERNEL_ELF_NAME}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_outfile>${KERNEL_BIN_NAME}
     $<TARGET_PROPERTY:bintools,elfconvert_flag_final>
@@ -56,7 +56,7 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,
 	 * arc_cpu_wake_flag will protect arc_cpu_sp that
 	 * only one slave cpu can read it per time
 	 */
-	arc_cpu_sp = Z_THREAD_STACK_BUFFER(stack) + sz;
+	arc_cpu_sp = Z_KERNEL_STACK_BUFFER(stack) + sz;

 	arc_cpu_wake_flag = cpu_num;
@@ -80,7 +80,7 @@ config IPM_CONSOLE_STACK_SIZE

 config AARCH64_IMAGE_HEADER
 	bool "Add image header"
-	default y if ARM_MMU
+	default y if ARM_MMU || ARM_MPU
 	help
 	  This option enables standard ARM64 boot image header used by Linux
 	  and understood by loaders such as u-boot on Xen xl tool.
@@ -8,6 +8,7 @@
 #include <device.h>
 #include <init.h>
 #include <kernel.h>
+#include <kernel_arch_func.h>
 #include <soc.h>
 #include <arch/arm64/mm.h>
 #include <linker/linker-defs.h>
@@ -8,7 +8,7 @@
 #include <arch/cpu.h>
 #include "mmu.h"

-#if CONFIG_MMU_PAGE_SIZE == 4096
+#if CONFIG_MMU_PAGE_SIZE == 4096 || defined(CONFIG_ARM_MPU)
 #define HEADER_PGSIZE 1
 #elif CONFIG_MMU_PAGE_SIZE == 16384
 #define HEADER_PGSIZE 2
@@ -129,7 +129,11 @@ void z_arm64_el2_init(void)

 	zero_cntvoff_el2();	/* Set 64-bit virtual timer offset to 0 */
 	zero_cnthctl_el2();
+#ifdef CONFIG_CPU_AARCH64_CORTEX_R
+	zero_cnthps_ctl_el2();
+#else
 	zero_cnthp_ctl_el2();
+#endif
 	/*
 	 * Enable this if/when we use the hypervisor timer.
 	 * write_cnthp_cval_el2(~(uint64_t)0);
@@ -50,6 +50,7 @@ volatile struct boot_params __aligned(L1_CACHE_BYTES) arm64_cpu_boot_params = {
 static const uint64_t cpu_node_list[] = {
 	DT_FOREACH_CHILD_STATUS_OKAY(DT_PATH(cpus), CPU_REG_ID)
 };
+static uint16_t target_list_mask;

 extern void z_arm64_mm_init(bool is_primary_core);

@@ -108,6 +109,8 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,
 	while (arm64_cpu_boot_params.fn) {
 		wfe();
 	}
+	/* Set secondary cores bit mask */
+	target_list_mask |= 1 << MPIDR_TO_CORE(cpu_mpid);
 	printk("Secondary CPU core %d (MPID:%#llx) is up\n", cpu_num, cpu_mpid);
 }

@@ -163,7 +166,8 @@ static void broadcast_ipi(unsigned int ipi)
 	 * Send SGI to all cores except itself
 	 * Note: Assume only one Cluster now.
 	 */
-	gic_raise_sgi(ipi, mpidr, SGIR_TGT_MASK & ~(1 << MPIDR_TO_CORE(mpidr)));
+	gic_raise_sgi(ipi, mpidr, target_list_mask &
+		      ~(1 << MPIDR_TO_CORE(mpidr)));
 }

 void sched_ipi_handler(const void *unused)

@@ -219,6 +223,9 @@ static int arm64_smp_init(const struct device *dev)
 {
 	ARG_UNUSED(dev);

+	/* Seting the primary core bit mask */
+	target_list_mask |= 1 << MPIDR_TO_CORE(GET_MPIDR());
+
 	/*
 	 * SGI0 is use for sched ipi, this might be changed to use Kconfig
 	 * option
@@ -114,4 +114,18 @@ void arch_new_thread(struct k_thread *thread, k_thread_stack_t *stack,
 	thread->arch.excNestCount = 0;
 #endif /* CONFIG_LAZY_FPU_SHARING */
 	thread->arch.flags = 0;
+
+	/*
+	 * When "eager FPU sharing" mode is enabled, FPU registers must be
+	 * initialised at the time of thread creation because the floating-point
+	 * context is always active and no further FPU initialisation is performed
+	 * later.
+	 */
+#if defined(CONFIG_EAGER_FPU_SHARING)
+	thread->arch.preempFloatReg.floatRegsUnion.fpRegs.fcw = 0x037f;
+	thread->arch.preempFloatReg.floatRegsUnion.fpRegs.ftw = 0xffff;
+#if defined(CONFIG_X86_SSE)
+	thread->arch.preempFloatReg.floatRegsUnion.fpRegsEx.mxcsr = 0x1f80;
+#endif /* CONFIG_X86_SSE */
+#endif /* CONFIG_EAGER_FPU_SHARING */
 }
@@ -198,9 +198,13 @@ zephyr_udc0: &usb {
 		#address-cells = <1>;
 		#size-cells = <1>;

-		/* Set all partitions with first 808K of flash */
-		/* last 216K are reseved for M0 usage */
-		/* Configure partitions to make use of the whole 808K */
+		/*
+		 * Configure partitions while leaving space for M0 BLE f/w
+		 * First 794K are configured for Zephyr to run on M4 core
+		 * Last 232K are left for BLE f/w on the M0 core
+		 * This partition set up is compatible with use of
+		 * stm32wb5x_BLE_Stack_full_fw.bin v1.13.x
+		 */

 		boot_partition: partition@0 {
 			label = "mcuboot";
@@ -208,19 +212,19 @@ zephyr_udc0: &usb {
 		};
 		slot0_partition: partition@c000 {
 			label = "image-0";
-			reg = <0x0000C000 0x5c000>;
+			reg = <0x0000c000 0x5a000>;
 		};
-		slot1_partition: partition@68000 {
+		slot1_partition: partition@66000 {
 			label = "image-1";
-			reg = <0x00068000 0x5c000>;
+			reg = <0x00066000 0x5a000>;
 		};
-		scratch_partition: partition@c4000 {
+		scratch_partition: partition@c0000 {
 			label = "image-scratch";
-			reg = <0x000c4000 0x4000>;
+			reg = <0x000c0000 0x4000>;
 		};
-		storage_partition: partition@c8000 {
+		storage_partition: partition@c4000 {
 			label = "storage";
-			reg = <0x000c8000 0x2000>;
+			reg = <0x000c4000 0x2000>;
 		};
 	};
 };
@@ -3,6 +3,7 @@

 set(SUPPORTED_EMU_PLATFORMS armfvp)
 set(ARMFVP_BIN_NAME FVP_BaseR_AEMv8R)
+set(ARMFVP_MIN_VERSION 11.16.16)

 set(ARMFVP_FLAGS
   -C cluster0.has_aarch64=1
@@ -12,8 +13,6 @@ set(ARMFVP_FLAGS
   -C cluster0.gicv3.SRE-enable-action-on-mmap=2
   -C cluster0.gicv3.SRE-EL2-enable-RAO=1
   -C cluster0.gicv3.extended-interrupt-range-support=1
-  -C bp.dram.enable_atomic_ops=1
-  -C bp.sram.enable_atomic_ops=1
   -C gic_distributor.GICD_CTLR-DS-1-means-secure-only=1
   -C gic_distributor.has-two-security-states=0
   -C bp.refcounter.non_arch_start_at_default=1
@@ -27,6 +27,8 @@ To Run the Fixed Virtual Platform simulation tool you must download "Armv8-R AEM
 FVP" from Arm developer [1]_ (This might require the user to register) and
 install it on your host PC.

+The current minimum required version of "Armv8-R AEM FVP" is 11.16.16.
+
 Hardware
 ********
@@ -28,14 +28,14 @@
 	};

 	soc {
-		flash0: flash@0 {
+		flash0: flash@88000000 {
 			compatible = "soc-nv-flash";
-			reg = <0x0 DT_SIZE_K(64)>;
+			reg = <0x88000000 DT_SIZE_M(64)>;
 		};

-		dram0: memory@10000000 {
+		dram0: memory@0 {
 			compatible = "mmio-dram";
-			reg = <0x10000000 DT_SIZE_K(2048)>;
+			reg = <0x0 DT_SIZE_M(128)>;
 		};
 	};
 };
@@ -8,7 +8,6 @@ CONFIG_BOARD_FVP_BASER_AEMV8R=y
 # Cache management
 CONFIG_CACHE_MANAGEMENT=y

-CONFIG_ISR_STACK_SIZE=1024
 CONFIG_THREAD_STACK_INFO=y

 # Enable Timer and Sys clock
@@ -32,4 +32,7 @@ CONFIG_PM_CPU_OPS=y
 CONFIG_PM_CPU_OPS_PSCI=n
 CONFIG_SOC_FVP_AEMV8R_EL2_INIT=y

+CONFIG_SMP=y
+CONFIG_MP_NUM_CPUS=4
+
 CONFIG_MAX_THREAD_BYTES=3
@@ -24,3 +24,4 @@ CONFIG_MP_NUM_CPUS=2
 CONFIG_CACHE_MANAGEMENT=y
 CONFIG_TIMEOUT_64BIT=y
 CONFIG_ARMV8_A_NS=y
+CONFIG_MAX_THREAD_BYTES=3
@@ -10,6 +10,25 @@ find_program(
   NAMES ${ARMFVP_BIN_NAME}
 )

+if(ARMFVP AND (DEFINED ARMFVP_MIN_VERSION))
+  execute_process(
+    COMMAND ${ARMFVP} --version
+    OUTPUT_VARIABLE out
+    OUTPUT_STRIP_TRAILING_WHITESPACE
+  )
+  string(REPLACE "\n" "" out ${out})
+  string(REGEX MATCH "[0-9]+\.[0-9]+\.[0-9]+" armfvp_version ${out})
+  if(${armfvp_version} VERSION_LESS ${ARMFVP_MIN_VERSION})
+    set(armfvp_warning_message "Found FVP version is \"${armfvp_version}\", "
+      "the minimum required by the current board is \"${ARMFVP_MIN_VERSION}\".")
+    message(WARNING "${armfvp_warning_message}")
+    set(ARMFVP
+      COMMAND ${CMAKE_COMMAND} -E echo ${armfvp_warning_message}
+      COMMAND ${ARMFVP}
+    )
+  endif()
+endif()
+
 if(CONFIG_ARMV8_A_NS)
   foreach(filetype BL1 FIP)
     if ((NOT DEFINED ARMFVP_${filetype}_FILE) AND (EXISTS "$ENV{ARMFVP_${filetype}_FILE}"))
@@ -40,8 +59,8 @@ add_custom_target(run_armfvp
   COMMAND
   ${ARMFVP}
   ${ARMFVP_FLAGS}
-  DEPENDS ${ARMFVP} ${logical_target_for_zephyr_elf}
+  DEPENDS ${logical_target_for_zephyr_elf}
   WORKING_DIRECTORY ${APPLICATION_BINARY_DIR}
-  COMMENT "FVP: ${ARMFVP}"
+  COMMENT "${ARMFVP_BIN_NAME}: ${armfvp_version}"
   USES_TERMINAL
 )
@@ -51,7 +51,7 @@ elseif("${ARCH}" STREQUAL "arm64")
   elseif(CONFIG_CPU_CORTEX_A72)
     set(GCC_M_CPU cortex-a72)
   elseif(CONFIG_CPU_CORTEX_R82)
-    set(GCC_M_ARCH armv8.4-a)
+    set(GCC_M_ARCH armv8.4-a+nolse)
   endif()
 elseif("${ARCH}" STREQUAL "arc")
   if(CONFIG_CPU_EM4_FPUS)
@@ -1,8 +1,46 @@
 :orphan:

-.. _zephyr_3.0:
+.. _zephyr_3.0.1:

-Zephyr 3.0.0 (Working draft)
+Zephyr 3.0.1
 ####################

+This is a maintenance release for 3.0.0.
+
+Security Vulnerability Related
+******************************
+
+The following security vulnerabilities (CVEs) were addressed in this
+release:
+
+* (N/A)
+
+More detailed information can be found in:
+https://docs.zephyrproject.org/latest/security/vulnerabilities.html
+
+Issues Fixed
+************
+
+These GitHub issues were addressed since the previous 3.0.0 tagged
+release:
+
+* :github:`43309` - arm64: Fix booting issue with FVP BaseR v8R (version >= 11.16.16)
+* :github:`43576` - arm64: Fix the broadcast ipi issue when Zephyr SMP runs on Xen
+* :github:`43845` - arm/arm64: Armv8r misc fix
+
+Security Vulnerability Related
+******************************
+
+The following security vulnerabilities (CVEs) were addressed in this
+release:
+
+More detailed information can be found in:
+https://docs.zephyrproject.org/latest/security/vulnerabilities.html
+
+
+.. _zephyr_3.0.0:
+
+Zephyr 3.0.0
+############################

 We are pleased to announce the release of Zephyr RTOS version 3.0.0.
@@ -109,6 +147,58 @@ Removed APIs in this release

 * Removed ``CONFIG_LOG_MINIMAL``. Use ``CONFIG_LOG_MODE_MINIMAL`` instead.

+* STM32 clock_control driver configuration was moved from Kconfig to :ref:`devicetree <dt-guide>`.
+  See the :dtcompatible:`st,stm32-rcc` devicetree binding for more information.
+  As a consequence, following Kconfig symbols were removed:
+
+  * ``CONFIG_CLOCK_STM32_SYSCLK_SRC_HSE``
+  * ``CONFIG_CLOCK_STM32_SYSCLK_SRC_HSI``
+  * ``CONFIG_CLOCK_STM32_SYSCLK_SRC_MSI``
+  * ``CONFIG_CLOCK_STM32_SYSCLK_SRC_PLL``
+  * ``CONFIG_CLOCK_STM32_SYSCLK_SRC_CSI``
+  * ``CONFIG_CLOCK_STM32_HSE_BYPASS``
+  * ``CONFIG_CLOCK_STM32_MSI_RANGE``
+  * ``CONFIG_CLOCK_STM32_PLL_SRC_MSI``
+  * ``CONFIG_CLOCK_STM32_PLL_SRC_HSI``
+  * ``CONFIG_CLOCK_STM32_PLL_SRC_HSE``
+  * ``CONFIG_CLOCK_STM32_PLL_SRC_PLL2``
+  * ``CONFIG_CLOCK_STM32_PLL_SRC_CSI``
+  * ``CONFIG_CLOCK_STM32_AHB_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_APB1_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_APB2_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_CPU1_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_CPU2_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_AHB3_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_AHB4_PRESCALER``
+  * ``CONFIG_CLOCK_STM32_PLL_PREDIV``
+  * ``CONFIG_CLOCK_STM32_PLL_PREDIV1``
+  * ``CONFIG_CLOCK_STM32_PLL_MULTIPLIER``
+  * ``CONFIG_CLOCK_STM32_PLL_XTPRE``
+  * ``CONFIG_CLOCK_STM32_PLL_M_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL_N_MULTIPLIER``
+  * ``CONFIG_CLOCK_STM32_PLL_P_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL_Q_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL_R_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_LSE``
+  * ``CONFIG_CLOCK_STM32_HSI_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_D1CPRE``
+  * ``CONFIG_CLOCK_STM32_HPRE``
+  * ``CONFIG_CLOCK_STM32_D2PPRE1``
+  * ``CONFIG_CLOCK_STM32_D2PPRE2``
+  * ``CONFIG_CLOCK_STM32_D1PPRE``
+  * ``CONFIG_CLOCK_STM32_D3PPRE``
+  * ``CONFIG_CLOCK_STM32_PLL3_ENABLE``
+  * ``CONFIG_CLOCK_STM32_PLL3_M_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL3_N_MULTIPLIER``
+  * ``CONFIG_CLOCK_STM32_PLL3_P_ENABLE``
+  * ``CONFIG_CLOCK_STM32_PLL3_P_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL3_Q_ENABLE``
+  * ``CONFIG_CLOCK_STM32_PLL3_Q_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL3_R_ENABLE``
+  * ``CONFIG_CLOCK_STM32_PLL3_R_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_PLL_DIVISOR``
+  * ``CONFIG_CLOCK_STM32_MSI_PLL_MODE``
+
 Deprecated in this release
 ==========================
@@ -14,6 +14,8 @@
 #include <kernel.h>
 #include <logging/log.h>

+#include "can_utils.h"
+
 LOG_MODULE_REGISTER(can_loopback, CONFIG_CAN_LOG_LEVEL);

 struct can_loopback_frame {
@@ -55,13 +57,6 @@ static void dispatch_frame(const struct zcan_frame *frame,
 	filter->rx_cb(&frame_tmp, filter->cb_arg);
 }

-static inline int check_filter_match(const struct zcan_frame *frame,
-				     const struct zcan_filter *filter)
-{
-	return ((filter->id & filter->id_mask) ==
-		(frame->id & filter->id_mask));
-}
-
 void tx_thread(void *data_arg, void *arg2, void *arg3)
 {
 	ARG_UNUSED(arg2);
@@ -77,7 +72,7 @@ void tx_thread(void *data_arg, void *arg2, void *arg3)
 		for (int i = 0; i < CONFIG_CAN_MAX_FILTER; i++) {
 			filter = &data->filters[i];
 			if (filter->rx_cb &&
-			    check_filter_match(&frame.frame, &filter->filter)) {
+			    can_utils_filter_match(&frame.frame, &filter->filter) != 0) {
 				dispatch_frame(&frame.frame, filter);
 			}
 		}
@@ -540,6 +540,8 @@ static void can_mcan_get_message(struct can_mcan_data *data,
	int data_length;
	void *cb_arg;
	struct can_mcan_rx_fifo_hdr hdr;
	bool rtr_filter_mask;
	bool rtr_filter;

	while ((*fifo_status_reg & CAN_MCAN_RXF0S_F0FL)) {
		get_idx = (*fifo_status_reg & CAN_MCAN_RXF0S_F0GI) >>
@@ -568,11 +570,17 @@ static void can_mcan_get_message(struct can_mcan_data *data,

		filt_idx = hdr.fidx;

		/* Check if RTR must match */
		if ((hdr.xtd && data->ext_filt_rtr_mask & (1U << filt_idx) &&
		     ((data->ext_filt_rtr >> filt_idx) & 1U) != frame.rtr) ||
		    (data->std_filt_rtr_mask & (1U << filt_idx) &&
		     ((data->std_filt_rtr >> filt_idx) & 1U) != frame.rtr)) {
		if (hdr.xtd != 0) {
			rtr_filter_mask = (data->ext_filt_rtr_mask & BIT(filt_idx)) != 0;
			rtr_filter = (data->ext_filt_rtr & BIT(filt_idx)) != 0;
		} else {
			rtr_filter_mask = (data->std_filt_rtr_mask & BIT(filt_idx)) != 0;
			rtr_filter = (data->std_filt_rtr & BIT(filt_idx)) != 0;
		}

		if (rtr_filter_mask && (rtr_filter != frame.rtr)) {
			/* RTR bit does not match filter RTR mask and bit, drop frame */
			*fifo_ack_reg = get_idx;
			continue;
		}
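
The rework above packs per-filter RTR configuration into two bitmasks: one bit per filter index saying "RTR must match" and one holding the expected RTR value. A self-contained sketch of that lookup; names are hypothetical and BIT() is redefined locally to mirror Zephyr's macro:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1UL << (n))

/* Accept a frame unless the filter demands an RTR bit that differs. */
static bool rtr_frame_accepted(uint32_t rtr_mask_bits, uint32_t rtr_value_bits,
			       unsigned int filt_idx, bool frame_rtr)
{
	bool must_match = (rtr_mask_bits & BIT(filt_idx)) != 0;
	bool expected = (rtr_value_bits & BIT(filt_idx)) != 0;

	return !must_match || (expected == frame_rtr);
}

int main(void)
{
	/* Filter 3 requires RTR frames; a data frame (rtr=false) is dropped. */
	printf("%d\n", rtr_frame_accepted(BIT(3), BIT(3), 3, false)); /* 0 */
	printf("%d\n", rtr_frame_accepted(BIT(3), BIT(3), 3, true));  /* 1 */
	return 0;
}
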
@@ -261,13 +261,11 @@ static void mcux_flexcan_copy_zfilter_to_mbconfig(const struct zcan_filter *src,
	if (src->id_type == CAN_STANDARD_IDENTIFIER) {
		dest->format = kFLEXCAN_FrameFormatStandard;
		dest->id = FLEXCAN_ID_STD(src->id);
		*mask = FLEXCAN_RX_MB_STD_MASK(src->id_mask,
					       src->rtr & src->rtr_mask, 1);
		*mask = FLEXCAN_RX_MB_STD_MASK(src->id_mask, src->rtr_mask, 1);
	} else {
		dest->format = kFLEXCAN_FrameFormatExtend;
		dest->id = FLEXCAN_ID_EXT(src->id);
		*mask = FLEXCAN_RX_MB_EXT_MASK(src->id_mask,
					       src->rtr & src->rtr_mask, 1);
		*mask = FLEXCAN_RX_MB_EXT_MASK(src->id_mask, src->rtr_mask, 1);
	}

	if ((src->rtr & src->rtr_mask) == CAN_DATAFRAME) {
@@ -661,6 +659,7 @@ static inline void mcux_flexcan_transfer_rx_idle(const struct device *dev,
static FLEXCAN_CALLBACK(mcux_flexcan_transfer_callback)
{
	struct mcux_flexcan_data *data = (struct mcux_flexcan_data *)userData;
	const struct mcux_flexcan_config *config = data->dev->config;
	/*
	 * The result field can either be a MB index (which is limited to 32 bit
	 * value) or a status flags value, which is 32 bit on some platforms but
@@ -680,6 +679,7 @@ static FLEXCAN_CALLBACK(mcux_flexcan_transfer_callback)
		mcux_flexcan_transfer_error_status(data->dev, status_flags);
		break;
	case kStatus_FLEXCAN_TxSwitchToRx:
		FLEXCAN_TransferAbortReceive(config->base, &data->handle, mb);
		__fallthrough;
	case kStatus_FLEXCAN_TxIdle:
		mcux_flexcan_transfer_tx_idle(data->dev, mb);

@@ -1057,7 +1057,7 @@ static int eth_stm32_hal_set_config(const struct device *dev,
		break;
	}

	return -ENOTSUP;
	return ret;
}

#if defined(CONFIG_PTP_CLOCK_STM32_HAL)
@@ -81,7 +81,7 @@ static inline int z_vrfy_i2c_slave_driver_register(const struct device *dev)
static inline int z_vrfy_i2c_slave_driver_unregister(const struct device *dev)
{
	Z_OOPS(Z_SYSCALL_OBJ(dev, K_OBJ_DRIVER_I2C));
	return z_vrfy_i2c_slave_driver_unregister(dev);
	return z_impl_i2c_slave_driver_unregister(dev);
}
#include <syscalls/i2c_slave_driver_unregister_mrsh.c>


@@ -21,7 +21,7 @@ static inline int z_vrfy_mbox_send(const struct mbox_channel *channel,

static inline int z_vrfy_mbox_mtu_get(const struct device *dev)
{
	Z_OOPS(Z_SYSCALL_DRIVER_MBOX(dev, max_data_size_get));
	Z_OOPS(Z_SYSCALL_DRIVER_MBOX(dev, mtu_get));

	return z_impl_mbox_mtu_get(dev);
}

@@ -69,10 +69,10 @@ static int pinmux_pullup(const struct device *dev, uint32_t pin, uint8_t func)
#if SOC_RTCIO_INPUT_OUTPUT_SUPPORTED
	int rtcio_num = rtc_io_num_map[pin];

	if (rtc_io_desc[rtcio_num].pulldown) {
		rtcio_hal_pulldown_disable(rtc_io_num_map[pin]);
	} else if (rtc_io_desc[rtcio_num].pullup) {
		rtcio_hal_pullup_enable(rtc_io_num_map[pin]);
		rtcio_hal_pulldown_enable(rtc_io_num_map[pin]);

	if (rtc_io_desc[rtcio_num].pullup) {
		rtcio_hal_pullup_disable(rtc_io_num_map[pin]);
	} else {
		return -ENOTSUP;
	}
@@ -87,9 +87,9 @@ static int pinmux_pullup(const struct device *dev, uint32_t pin, uint8_t func)
#if SOC_RTCIO_INPUT_OUTPUT_SUPPORTED
	int rtcio_num = rtc_io_num_map[pin];

	if (rtc_io_desc[rtcio_num].pulldown) {
		rtcio_hal_pulldown_disable(rtc_io_num_map[pin]);
	} else if (rtc_io_desc[rtcio_num].pullup) {
		rtcio_hal_pulldown_disable(rtc_io_num_map[pin]);

	if (rtc_io_desc[rtcio_num].pullup) {
		rtcio_hal_pullup_enable(rtc_io_num_map[pin]);
	} else {
		return -ENOTSUP;
@@ -63,7 +63,7 @@
		clk_lsi: clk-lsi {
			#clock-cells = <0>;
			compatible = "fixed-clock";
			clock-frequency = <DT_FREQ_K(32)>;
			clock-frequency = <DT_FREQ_K(37)>;
			status = "disabled";
		};


@@ -56,6 +56,7 @@
MAKE_REG_HELPER(cntfrq_el0);
MAKE_REG_HELPER(cnthctl_el2);
MAKE_REG_HELPER(cnthp_ctl_el2);
MAKE_REG_HELPER(cnthps_ctl_el2);
MAKE_REG_HELPER(cntv_ctl_el0)
MAKE_REG_HELPER(cntv_cval_el0)
MAKE_REG_HELPER(cntvct_el0);
@@ -94,7 +94,7 @@ void free(void *ptr)
	(void) sys_mutex_unlock(&z_malloc_heap_mutex);
}

SYS_INIT(malloc_prepare, APPLICATION, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);
SYS_INIT(malloc_prepare, POST_KERNEL, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);
#else /* No malloc arena */
void *malloc(size_t size)
{

@@ -134,7 +134,7 @@ static int malloc_prepare(const struct device *unused)
	return 0;
}

SYS_INIT(malloc_prepare, APPLICATION, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);
SYS_INIT(malloc_prepare, POST_KERNEL, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);

/* Current offset from HEAP_BASE of unused memory */
LIBC_BSS static size_t heap_sz;

@@ -5,7 +5,7 @@ envlist=py3
deps =
    setuptools-scm
    pytest
    types-PyYAML
    types-PyYAML==6.0.7
    mypy
setenv =
    TOXTEMPDIR={envtmpdir}
@@ -673,9 +673,10 @@ class DeviceHandler(Handler):
                serial_line = ser.readline()
            except TypeError:
                pass
            # ignore SerialException which may happen during the serial device
            # power off/on process.
            except serial.SerialException:
                ser.close()
                break
                pass

            # Just because ser_fileno has data doesn't mean an entire line
            # is available yet.
@@ -26,17 +26,17 @@ def parse_args():
    parser.add_argument('-a', '--all', dest='all',
                        help='Show all bugs squashed', action='store_true')
    parser.add_argument('-t', '--token', dest='tokenfile',
                        help='File containing GitHub token', metavar='FILE')
    parser.add_argument('-b', '--begin', dest='begin', help='begin date (YYYY-mm-dd)',
                        metavar='date', type=valid_date_type, required=True)
                        help='File containing GitHub token (alternatively, use GITHUB_TOKEN env variable)', metavar='FILE')
    parser.add_argument('-s', '--start', dest='start', help='start date (YYYY-mm-dd)',
                        metavar='START_DATE', type=valid_date_type, required=True)
    parser.add_argument('-e', '--end', dest='end', help='end date (YYYY-mm-dd)',
                        metavar='date', type=valid_date_type, required=True)
                        metavar='END_DATE', type=valid_date_type, required=True)

    args = parser.parse_args()

    if args.end < args.begin:
    if args.end < args.start:
        raise ValueError(
            'end date {} is before begin date {}'.format(args.end, args.begin))
            'end date {} is before start date {}'.format(args.end, args.start))

    if args.tokenfile:
        with open(args.tokenfile, 'r') as file:
@@ -53,12 +53,12 @@ def parse_args():


class BugBashTally(object):
    def __init__(self, gh, begin_date, end_date):
    def __init__(self, gh, start_date, end_date):
        """Create a BugBashTally object with the provided Github object,
        begin datetime object, and end datetime object"""
        start datetime object, and end datetime object"""
        self._gh = gh
        self._repo = gh.get_repo('zephyrproject-rtos/zephyr')
        self._begin_date = begin_date
        self._start_date = start_date
        self._end_date = end_date

        self._issues = []
@@ -122,12 +122,12 @@ class BugBashTally(object):

        cutoff = self._end_date + timedelta(1)
        issues = self._repo.get_issues(state='closed', labels=[
            'bug'], since=self._begin_date)
            'bug'], since=self._start_date)

        for i in issues:
            # the PyGithub API and v3 REST API do not facilitate 'until'
            # or 'end date' :-/
            if i.closed_at < self._begin_date or i.closed_at > cutoff:
            if i.closed_at < self._start_date or i.closed_at > cutoff:
                continue

            ipr = i.pull_request
@@ -167,7 +167,7 @@ def print_top_ten(top_ten):

def main():
    args = parse_args()
    bbt = BugBashTally(Github(args.token), args.begin, args.end)
    bbt = BugBashTally(Github(args.token), args.start, args.end)
    if args.all:
        # print one issue per line
        issues = bbt.get_issues()
341
scripts/release/list_backports.py
Executable file
@@ -0,0 +1,341 @@
#!/usr/bin/env python3
# Copyright (c) 2022, Meta
#
# SPDX-License-Identifier: Apache-2.0

"""Query issues in a release branch

This script searches for issues referenced via pull-requests in a release
branch in order to simplify tracking changes such as automated backports,
manual backports, security fixes, and stability fixes.

A formatted report is printed to standard output either in JSON or
reStructuredText.

Since an issue is required for all changes to release branches, merged PRs
must have at least one instance of the phrase "Fixes #1234" in the body. This
script will throw an error if a PR has been made without an associated issue.

Usage:
    ./scripts/release/list_backports.py \
        -t ~/.ghtoken \
        -b v2.7-branch \
        -s 2021-12-15 -e 2022-04-22 \
        -P 45074 -P 45868 -P 44918 -P 41234 -P 41174 \
        -j | jq . | tee /tmp/backports.json

    GITHUB_TOKEN="<secret>" \
    ./scripts/release/list_backports.py \
        -b v3.0-branch \
        -p 43381 \
        -j | jq . | tee /tmp/backports.json
"""

import argparse
from datetime import datetime, timedelta
import io
import json
import logging
import os
import re
import sys

# Requires PyGithub
from github import Github


# https://gist.github.com/monkut/e60eea811ef085a6540f
def valid_date_type(arg_date_str):
    """custom argparse *date* type for user dates values given from the
    command line"""
    try:
        return datetime.strptime(arg_date_str, "%Y-%m-%d")
    except ValueError:
        msg = "Given Date ({0}) not valid! Expected format, YYYY-MM-DD!".format(arg_date_str)
        raise argparse.ArgumentTypeError(msg)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--token', dest='tokenfile',
                        help='File containing GitHub token (alternatively, use GITHUB_TOKEN env variable)', metavar='FILE')
    parser.add_argument('-b', '--base', dest='base',
                        help='branch (base) for PRs (e.g. v2.7-branch)', metavar='BRANCH', required=True)
    parser.add_argument('-j', '--json', dest='json', action='store_true',
                        help='print output in JSON rather than RST')
    parser.add_argument('-s', '--start', dest='start', help='start date (YYYY-mm-dd)',
                        metavar='START_DATE', type=valid_date_type)
    parser.add_argument('-e', '--end', dest='end', help='end date (YYYY-mm-dd)',
                        metavar='END_DATE', type=valid_date_type)
    parser.add_argument("-o", "--org", default="zephyrproject-rtos",
                        help="Github organisation")
    parser.add_argument('-p', '--include-pull', dest='includes',
                        help='include pull request (can be specified multiple times)',
                        metavar='PR', type=int, action='append', default=[])
    parser.add_argument('-P', '--exclude-pull', dest='excludes',
                        help='exclude pull request (can be specified multiple times, helpful for version bumps and release notes)',
                        metavar='PR', type=int, action='append', default=[])
    parser.add_argument("-r", "--repo", default="zephyr",
                        help="Github repository")

    args = parser.parse_args()

    if args.includes:
        if getattr(args, 'start'):
            logging.error(
                'the --start argument should not be used with --include-pull')
            return None
        if getattr(args, 'end'):
            logging.error(
                'the --end argument should not be used with --include-pull')
            return None
    else:
        if not getattr(args, 'start'):
            logging.error(
                'if --include-pr PR is not used, --start START_DATE is required')
            return None

        if not getattr(args, 'end'):
            setattr(args, 'end', datetime.now())

        if args.end < args.start:
            logging.error(
                f'end date {args.end} is before start date {args.start}')
            return None

    if args.tokenfile:
        with open(args.tokenfile, 'r') as file:
            token = file.read()
            token = token.strip()
    else:
        if 'GITHUB_TOKEN' not in os.environ:
            raise ValueError('No credentials specified')
        token = os.environ['GITHUB_TOKEN']

    setattr(args, 'token', token)

    return args


class Backport(object):
    def __init__(self, repo, base, pulls):
        self._base = base
        self._repo = repo
        self._issues = []
        self._pulls = pulls

        self._pulls_without_an_issue = []
        self._pulls_with_invalid_issues = {}

    @staticmethod
    def by_date_range(repo, base, start_date, end_date, excludes):
        """Create a Backport object with the provided repo,
        base, start datetime object, and end datetime objects, and
        list of excluded PRs"""

        pulls = []

        unfiltered_pulls = repo.get_pulls(
            base=base, state='closed')
        for p in unfiltered_pulls:
            if not p.merged:
                # only consider merged backports
                continue

            if p.closed_at < start_date or p.closed_at >= end_date + timedelta(1):
                # only concerned with PRs within time window
                continue

            if p.number in excludes:
                # skip PRs that have been explicitly excluded
                continue

            pulls.append(p)

        # paginated_list.sort() does not exist
        pulls = sorted(pulls, key=lambda x: x.number)

        return Backport(repo, base, pulls)

    @staticmethod
    def by_included_prs(repo, base, includes):
        """Create a Backport object with the provided repo,
        base, and list of included PRs"""

        pulls = []

        for i in includes:
            try:
                p = repo.get_pull(i)
            except Exception:
                p = None

            if not p:
                logging.error(f'{i} is not a valid pull request')
                return None

            if p.base.ref != base:
                logging.error(
                    f'{i} is not a valid pull request for base {base} ({p.base.label})')
                return None

            pulls.append(p)

        # paginated_list.sort() does not exist
        pulls = sorted(pulls, key=lambda x: x.number)

        return Backport(repo, base, pulls)

    @staticmethod
    def sanitize_title(title):
        # TODO: sanitize titles such that they are suitable for both JSON and ReStructured Text
        # could also automatically fix titles like "Automated backport of PR #1234"
        return title

    def print(self):
        for i in self.get_issues():
            title = Backport.sanitize_title(i.title)
            # * :github:`38972` - logging: Cleaning references to tracing in logging
            print(f'* :github:`{i.number}` - {title}')

    def print_json(self):
        issue_objects = []
        for i in self.get_issues():
            obj = {}
            obj['id'] = i.number
            obj['title'] = Backport.sanitize_title(i.title)
            obj['url'] = f'https://github.com/{self._repo.organization.login}/{self._repo.name}/pull/{i.number}'
            issue_objects.append(obj)

        print(json.dumps(issue_objects))

    def get_pulls(self):
        return self._pulls

    def get_issues(self):
        """Return GitHub issues fixed in the provided date window"""
        if self._issues:
            return self._issues

        issue_map = {}
        self._pulls_without_an_issue = []
        self._pulls_with_invalid_issues = {}

        for p in self._pulls:
            # check for issues in this pr
            issues_for_this_pr = {}
            with io.StringIO(p.body) as buf:
                for line in buf.readlines():
                    line = line.strip()
                    match = re.search(r"^Fixes[:]?\s*#([1-9][0-9]*).*", line)
                    if not match:
                        match = re.search(
                            rf"^Fixes[:]?\s*https://github\.com/{self._repo.organization.login}/{self._repo.name}/issues/([1-9][0-9]*).*", line)
                    if not match:
                        continue
                    issue_number = int(match[1])
                    issue = self._repo.get_issue(issue_number)
                    if not issue:
                        if p.number not in self._pulls_with_invalid_issues:
                            self._pulls_with_invalid_issues[p.number] = [
                                issue_number]
                        else:
                            self._pulls_with_invalid_issues[p.number].append(
                                issue_number)
                        logging.error(
                            f'https://github.com/{self._repo.organization.login}/{self._repo.name}/pull/{p.number} references invalid issue number {issue_number}')
                        continue
                    issues_for_this_pr[issue_number] = issue

            # report prs missing issues later
            if len(issues_for_this_pr) == 0:
                logging.error(
                    f'https://github.com/{self._repo.organization.login}/{self._repo.name}/pull/{p.number} does not have an associated issue')
                self._pulls_without_an_issue.append(p)
                continue

            # FIXME: when we have upgrade to python3.9+, use "issue_map | issues_for_this_pr"
            issue_map = {**issue_map, **issues_for_this_pr}

        issues = list(issue_map.values())

        # paginated_list.sort() does not exist
        issues = sorted(issues, key=lambda x: x.number)

        self._issues = issues

        return self._issues

    def get_pulls_without_issues(self):
        if self._pulls_without_an_issue:
            return self._pulls_without_an_issue

        self.get_issues()

        return self._pulls_without_an_issue

    def get_pulls_with_invalid_issues(self):
        if self._pulls_with_invalid_issues:
            return self._pulls_with_invalid_issues

        self.get_issues()

        return self._pulls_with_invalid_issues


def main():
    args = parse_args()

    if not args:
        return os.EX_DATAERR

    try:
        gh = Github(args.token)
    except Exception:
        logging.error('failed to authenticate with GitHub')
        return os.EX_DATAERR

    try:
        repo = gh.get_repo(args.org + '/' + args.repo)
    except Exception:
        logging.error('failed to obtain Github repository')
        return os.EX_DATAERR

    bp = None
    if args.includes:
        bp = Backport.by_included_prs(repo, args.base, set(args.includes))
    else:
        bp = Backport.by_date_range(repo, args.base,
                                    args.start, args.end, set(args.excludes))

    if not bp:
        return os.EX_DATAERR

    pulls_with_invalid_issues = bp.get_pulls_with_invalid_issues()
    if pulls_with_invalid_issues:
        logging.error('The following PRs link to invalid issues:')
        for (p, lst) in pulls_with_invalid_issues.items():
            logging.error(
                f'\nhttps://github.com/{repo.organization.login}/{repo.name}/pull/{p}: {lst}')
        return os.EX_DATAERR

    pulls_without_issues = bp.get_pulls_without_issues()
    if pulls_without_issues:
        logging.error(
            'Please ensure the body of each PR to a release branch contains "Fixes #1234"')
        logging.error('The following PRs are lacking associated issues:')
        for p in pulls_without_issues:
            logging.error(
                f'https://github.com/{repo.organization.login}/{repo.name}/pull/{p.number}')
        return os.EX_DATAERR

    if args.json:
        bp.print_json()
    else:
        bp.print()

    return os.EX_OK


if __name__ == '__main__':
    sys.exit(main())

@@ -147,10 +147,10 @@ def parse_args():
def main():
    parse_args()

    token = os.environ.get('GH_TOKEN', None)
    token = os.environ.get('GITHUB_TOKEN', None)
    if not token:
        sys.exit("""Github token not set in environment,
set the env. variable GH_TOKEN please and retry.""")
set the env. variable GITHUB_TOKEN please and retry.""")

    i = Issues(args.org, args.repo, token)

@@ -213,5 +213,6 @@ set the env. variable GH_TOKEN please and retry.""")
        f.write("* :github:`{}` - {}\n".format(
            item['number'], item['title']))


if __name__ == '__main__':
    main()

@@ -651,7 +651,7 @@ class ZephyrBinaryRunner(abc.ABC):
            return b''
        return subprocess.check_output(cmd, **kwargs)

    def popen_ignore_int(self, cmd: List[str]) -> subprocess.Popen:
    def popen_ignore_int(self, cmd: List[str], **kwargs) -> subprocess.Popen:
        '''Spawn a child command, ensuring it ignores SIGINT.

        The returned subprocess.Popen object must be manually terminated.'''
@@ -671,7 +671,7 @@ class ZephyrBinaryRunner(abc.ABC):
        if _DRY_RUN:
            return _DebugDummyPopen()  # type: ignore

        return subprocess.Popen(cmd, creationflags=cflags, preexec_fn=preexec)
        return subprocess.Popen(cmd, creationflags=cflags, preexec_fn=preexec, **kwargs)

    def ensure_output(self, output_type: str) -> None:
        '''Ensure self.cfg has a particular output artifact.

@@ -104,7 +104,7 @@ def mdb_do_run(mdb_runner, command):
                           # core will download the shared image.
                           ('-prop=download=2' if i > 0 else '')] +
                          mdb_basic_options + mdb_target + [mdb_runner.elf_name])
            mdb_runner.check_call(mdb_sub_cmd)
            mdb_runner.check_call(mdb_sub_cmd, cwd=mdb_runner.build_dir)
            mdb_multifiles += ('core{}'.format(mdb_runner.cores-1-i) if i == 0 else ',core{}'.format(mdb_runner.cores-1-i))

        # to enable multi-core aware mode for use with the MetaWare debugger,
@@ -116,7 +116,7 @@ def mdb_do_run(mdb_runner, command):
    else:
        raise ValueError('unsupported cores {}'.format(mdb_runner.cores))

    process = mdb_runner.popen_ignore_int(mdb_cmd)
    process = mdb_runner.popen_ignore_int(mdb_cmd, cwd=mdb_runner.build_dir)
    record_cld_pid(mdb_runner, process)

@@ -10,7 +10,7 @@ from unittest.mock import call
import pytest

from runners.mdb import MdbNsimBinaryRunner, MdbHwBinaryRunner
from conftest import RC_KERNEL_ELF, RC_BOARD_DIR
from conftest import RC_KERNEL_ELF, RC_BOARD_DIR, RC_BUILD_DIR


TEST_DRIVER_CMD = 'mdb'
@@ -156,7 +156,7 @@ def require_patch(program):
def test_flash_nsim(require, cc, t, gcp, test_case, mdb_nsim):
    mdb_nsim(test_case['i']).run('flash')
    assert require.called
    cc.assert_called_once_with(test_case['o'])
    cc.assert_called_once_with(test_case['o'], cwd=RC_BUILD_DIR)

@pytest.mark.parametrize('test_case', TEST_NSIM_DEBUG_CASES)
@patch('runners.mdb.get_cld_pid', return_value=(False, -1))
@@ -166,7 +166,7 @@ def test_flash_nsim(require, cc, t, gcp, test_case, mdb_nsim):
def test_debug_nsim(require, pii, t, gcp, test_case, mdb_nsim):
    mdb_nsim(test_case['i']).run('debug')
    assert require.called
    pii.assert_called_once_with(test_case['o'])
    pii.assert_called_once_with(test_case['o'], cwd=RC_BUILD_DIR)

@pytest.mark.parametrize('test_case', TEST_NSIM_MULTICORE_CASES)
@patch('runners.mdb.get_cld_pid', return_value=(False, -1))
@@ -177,9 +177,9 @@ def test_debug_nsim(require, pii, t, gcp, test_case, mdb_nsim):
def test_multicores_nsim(require, pii, cc, t, gcp, test_case, mdb_nsim):
    mdb_nsim(test_case).run('flash')
    assert require.called
    cc_calls = [call(TEST_NSIM_CORE1), call(TEST_NSIM_CORE2)]
    cc_calls = [call(TEST_NSIM_CORE1, cwd=RC_BUILD_DIR), call(TEST_NSIM_CORE2, cwd=RC_BUILD_DIR)]
    cc.assert_has_calls(cc_calls)
    pii.assert_called_once_with(TEST_NSIM_CORES_LAUNCH)
    pii.assert_called_once_with(TEST_NSIM_CORES_LAUNCH, cwd=RC_BUILD_DIR)


# mdb-hw test cases
@@ -191,7 +191,7 @@ def test_multicores_nsim(require, pii, cc, t, gcp, test_case, mdb_nsim):
def test_flash_hw(require, cc, t, gcp, test_case, mdb_hw):
    mdb_hw(test_case['i']).run('flash')
    assert require.called
    cc.assert_called_once_with(test_case['o'])
    cc.assert_called_once_with(test_case['o'], cwd=RC_BUILD_DIR)

@pytest.mark.parametrize('test_case', TEST_HW_DEBUG_CASES)
@patch('runners.mdb.get_cld_pid', return_value=(False, -1))
@@ -201,7 +201,7 @@ def test_flash_hw(require, cc, t, gcp, test_case, mdb_hw):
def test_debug_hw(require, pii, t, gcp, test_case, mdb_hw):
    mdb_hw(test_case['i']).run('debug')
    assert require.called
    pii.assert_called_once_with(test_case['o'])
    pii.assert_called_once_with(test_case['o'], cwd=RC_BUILD_DIR)

@pytest.mark.parametrize('test_case', TEST_HW_MULTICORE_CASES)
@patch('runners.mdb.get_cld_pid', return_value=(False, -1))
@@ -212,6 +212,6 @@ def test_debug_hw(require, pii, t, gcp, test_case, mdb_hw):
def test_multicores_hw(require, pii, cc, t, gcp, test_case, mdb_hw):
    mdb_hw(test_case).run('flash')
    assert require.called
    cc_calls = [call(TEST_HW_CORE1), call(TEST_HW_CORE2)]
    cc_calls = [call(TEST_HW_CORE1, cwd=RC_BUILD_DIR), call(TEST_HW_CORE2, cwd=RC_BUILD_DIR)]
    cc.assert_has_calls(cc_calls)
    pii.assert_called_once_with(TEST_HW_CORES_LAUNCH)
    pii.assert_called_once_with(TEST_HW_CORES_LAUNCH, cwd=RC_BUILD_DIR)

@@ -128,7 +128,7 @@ Created: {datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")}

    # write other license info, if any
    if len(doc.customLicenseIDs) > 0:
        for lic in list(doc.customLicenseIDs).sort():
        for lic in sorted(list(doc.customLicenseIDs)):
            writeOtherLicenseSPDX(f, lic)

# Open SPDX document file for writing, write the document, and calculate

@@ -241,12 +241,12 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,

	sr.cpu = cpu_num;
	sr.fn = fn;
	sr.stack_top = Z_THREAD_STACK_BUFFER(stack) + sz;
	sr.stack_top = Z_KERNEL_STACK_BUFFER(stack) + sz;
	sr.arg = arg;
	sr.vecbase = vb;
	sr.alive = &alive_flag;

	appcpu_top = Z_THREAD_STACK_BUFFER(stack) + sz;
	appcpu_top = Z_KERNEL_STACK_BUFFER(stack) + sz;

	start_rec = &sr;

@@ -115,7 +115,7 @@ void arch_start_cpu(int cpu_num, k_thread_stack_t *stack, int sz,
	start_rec.fn = fn;
	start_rec.arg = arg;

	z_mp_stack_top = Z_THREAD_STACK_BUFFER(stack) + sz;
	z_mp_stack_top = Z_KERNEL_STACK_BUFFER(stack) + sz;

	soc_start_core(cpu_num);
}

@@ -695,7 +695,7 @@ void sw_switch(uint8_t dir_curr, uint8_t dir_next, uint8_t phy_curr, uint8_t fla
			hal_radio_sw_switch_coded_tx_config_set(ppi_en, ppi_dis,
								cc_s2, sw_tifs_toggle);

	} else if (dir_curr == SW_SWITCH_RX) {
	} else {
		/* Switching to TX after RX on LE 1M/2M PHY.
		 *
		 * NOTE: PHYEND delay compensation and switching between Coded S2 and S8 PHY

@@ -4567,6 +4567,7 @@ static inline void event_phy_upd_ind_prep(struct ll_conn *conn,
	struct lll_conn *lll = &conn->lll;
	struct node_rx_pdu *rx;
	uint8_t old_tx, old_rx;
	uint8_t phy_bitmask;

	/* Acquire additional rx node for Data length notification as
	 * a peripheral.
@@ -4596,6 +4597,15 @@
		conn->llcp_ack = conn->llcp_req;
	}

	/* supported PHYs mask */
	phy_bitmask = PHY_1M;
	if (IS_ENABLED(CONFIG_BT_CTLR_PHY_2M)) {
		phy_bitmask |= PHY_2M;
	}
	if (IS_ENABLED(CONFIG_BT_CTLR_PHY_CODED)) {
		phy_bitmask |= PHY_CODED;
	}

	/* apply new phy */
	old_tx = lll->phy_tx;
	old_rx = lll->phy_rx;
@@ -4609,7 +4619,10 @@
#endif /* CONFIG_BT_CTLR_DATA_LENGTH */

	if (conn->llcp.phy_upd_ind.tx) {
		lll->phy_tx = conn->llcp.phy_upd_ind.tx;
	if (conn->llcp.phy_upd_ind.tx & phy_bitmask) {
		lll->phy_tx = conn->llcp.phy_upd_ind.tx &
			      phy_bitmask;
	}

#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
		eff_tx_time = calc_eff_time(lll->max_tx_octets,
@@ -4619,7 +4632,10 @@
#endif /* CONFIG_BT_CTLR_DATA_LENGTH */
	}
	if (conn->llcp.phy_upd_ind.rx) {
		lll->phy_rx = conn->llcp.phy_upd_ind.rx;
	if (conn->llcp.phy_upd_ind.rx & phy_bitmask) {
		lll->phy_rx = conn->llcp.phy_upd_ind.rx &
			      phy_bitmask;
	}

#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
		eff_rx_time =
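
The guard added above keeps a peer-requested PHY from being applied unless the controller actually supports it. A standalone illustration of the same masking, assuming the usual single-bit PHY_* encoding; variable names are illustrative:

#include <stdint.h>
#include <stdio.h>

#define PHY_1M    0x01
#define PHY_2M    0x02
#define PHY_CODED 0x04

int main(void)
{
	/* Supported-PHY bitmask, as built above from the enabled options. */
	uint8_t phy_bitmask = PHY_1M | PHY_2M;

	uint8_t current_phy = PHY_1M;
	uint8_t requested_phy = PHY_CODED;

	/* Apply the requested PHY only where it intersects the supported
	 * set; an unsupported request leaves the current PHY unchanged.
	 */
	if (requested_phy & phy_bitmask) {
		current_phy = requested_phy & phy_bitmask;
	}

	printf("phy=0x%02x\n", current_phy); /* 0x01: Coded unsupported */
	return 0;
}
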
@@ -29,6 +29,7 @@

#define START_PAYLOAD_MAX 20
#define CONT_PAYLOAD_MAX 23
#define RX_BUFFER_MAX 65

#define START_LAST_SEG(gpc) (gpc >> 2)
#define CONT_SEG_INDEX(gpc) (gpc >> 2)
@@ -38,7 +39,8 @@
#define LINK_ACK 0x01
#define LINK_CLOSE 0x02

#define XACT_SEG_DATA(_seg) (&link.rx.buf->data[20 + ((_seg - 1) * 23)])
#define XACT_SEG_OFFSET(_seg) (20 + ((_seg - 1) * 23))
#define XACT_SEG_DATA(_seg) (&link.rx.buf->data[XACT_SEG_OFFSET(_seg)])
#define XACT_SEG_RECV(_seg) (link.rx.seg &= ~(1 << (_seg)))

#define XACT_ID_MAX 0x7f
@@ -116,7 +118,7 @@ struct prov_rx {
	uint8_t gpc;
};

NET_BUF_SIMPLE_DEFINE_STATIC(rx_buf, 65);
NET_BUF_SIMPLE_DEFINE_STATIC(rx_buf, RX_BUFFER_MAX);

static struct pb_adv link = { .rx = { .buf = &rx_buf } };

@@ -147,7 +149,7 @@ static struct bt_mesh_send_cb buf_sent_cb = {
	.end = buf_sent,
};

static uint8_t last_seg(uint8_t len)
static uint8_t last_seg(uint16_t len)
{
	if (len <= START_PAYLOAD_MAX) {
		return 0;
@@ -383,6 +385,11 @@ static void gen_prov_cont(struct prov_rx *rx, struct net_buf_simple *buf)
		return;
	}

	if (XACT_SEG_OFFSET(seg) + buf->len > RX_BUFFER_MAX) {
		BT_WARN("Rx buffer overflow. Malformed generic prov frame?");
		return;
	}

	memcpy(XACT_SEG_DATA(seg), buf->data, buf->len);
	XACT_SEG_RECV(seg);

@@ -475,6 +482,13 @@ static void gen_prov_start(struct prov_rx *rx, struct net_buf_simple *buf)
		return;
	}

	if (START_LAST_SEG(rx->gpc) != last_seg(link.rx.buf->len)) {
		BT_ERR("Invalid SegN (%u, calculated %u)", START_LAST_SEG(rx->gpc),
		       last_seg(link.rx.buf->len));
		prov_failed(PROV_ERR_NVAL_FMT);
		return;
	}

	prov_clear_tx();

	link.rx.last_seg = START_LAST_SEG(rx->gpc);
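
The new bounds check above rejects continuation segments whose payload would run past the 65-byte RX buffer. A sketch of the arithmetic, reusing the constants from the hunk; the helper name is hypothetical:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define START_PAYLOAD_MAX 20
#define CONT_PAYLOAD_MAX 23
#define RX_BUFFER_MAX 65

/* Continuation segment _seg lands at a fixed offset in the RX buffer. */
#define XACT_SEG_OFFSET(_seg) (START_PAYLOAD_MAX + (((_seg) - 1) * CONT_PAYLOAD_MAX))

/* Accept a segment only if its payload ends within the buffer. */
static bool seg_fits(uint8_t seg, uint16_t payload_len)
{
	return XACT_SEG_OFFSET(seg) + payload_len <= RX_BUFFER_MAX;
}

int main(void)
{
	printf("%d\n", seg_fits(2, 22)); /* offset 43 + 22 = 65: fits, 1 */
	printf("%d\n", seg_fits(2, 23)); /* offset 43 + 23 = 66: overflow, 0 */
	return 0;
}
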
@@ -161,7 +161,7 @@ int bt_mesh_proxy_msg_send(struct bt_mesh_proxy_role *role, uint8_t type,
	net_buf_simple_pull(msg, mtu);

	while (msg->len) {
		if (msg->len + 1 < mtu) {
		if (msg->len + 1 <= mtu) {
			net_buf_simple_push_u8(msg, PDU_HDR(SAR_LAST, type));
			err = role->cb.send(conn, msg->data, msg->len, end, user_data);
			if (err) {
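
The "<" to "<=" change above fixes an off-by-one at the segmentation boundary: a segment carries a 1-byte PDU header plus payload, so the remainder fits in one final segment exactly when len + 1 <= mtu. A tiny worked example of the two comparisons (values chosen for illustration):

#include <stdio.h>

int main(void)
{
	/* With mtu = 23 and 22 bytes left, len + 1 == 23 just fits; the
	 * original "<" comparison wrongly forced an extra segment here.
	 */
	unsigned int mtu = 23, len = 22;

	printf("'<'  says fits: %d\n", len + 1 < mtu);  /* 0 */
	printf("'<=' says fits: %d\n", len + 1 <= mtu); /* 1 */
	return 0;
}
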
@@ -239,14 +239,16 @@ static bool start_http_client(void)
	int protocol = IPPROTO_TCP;
#endif

	(void)memset(&hints, 0, sizeof(hints));

	if (IS_ENABLED(CONFIG_NET_IPV6)) {
		hints.ai_family = AF_INET6;
		hints.ai_socktype = SOCK_STREAM;
	} else if (IS_ENABLED(CONFIG_NET_IPV4)) {
		hints.ai_family = AF_INET;
		hints.ai_socktype = SOCK_STREAM;
	}

	hints.ai_socktype = SOCK_STREAM;

	while (resolve_attempts--) {
		ret = getaddrinfo(CONFIG_HAWKBIT_SERVER, CONFIG_HAWKBIT_PORT,
				  &hints, &addr);
@@ -412,6 +414,8 @@ static int hawkbit_find_cancelAction_base(struct hawkbit_ctl_res *res,
		return 0;
	}

	LOG_DBG("_links.%s.href=%s", "cancelAction", href);

	helper = strstr(href, "cancelAction/");
	if (!helper) {
		/* A badly formatted cancel base is a server error */
@@ -465,6 +469,8 @@ static int hawkbit_find_deployment_base(struct hawkbit_ctl_res *res,
		return 0;
	}

	LOG_DBG("_links.%s.href=%s", "deploymentBase", href);

	helper = strstr(href, "deploymentBase/");
	if (!helper) {
		/* A badly formatted deployment base is a server error */
@@ -573,17 +579,6 @@ static int hawkbit_parse_deployment(struct hawkbit_dep_res *res,
	return 0;
}

static void hawkbit_dump_base(struct hawkbit_ctl_res *r)
{
	LOG_DBG("config.polling.sleep=%s", log_strdup(r->config.polling.sleep));
	LOG_DBG("_links.deploymentBase.href=%s",
		log_strdup(r->_links.deploymentBase.href));
	LOG_DBG("_links.configData.href=%s",
		log_strdup(r->_links.configData.href));
	LOG_DBG("_links.cancelAction.href=%s",
		log_strdup(r->_links.cancelAction.href));
}

static void hawkbit_dump_deployment(struct hawkbit_dep_res *d)
{
	struct hawkbit_dep_res_chunk *c = &d->deployment.chunks[0];
@@ -1143,9 +1138,9 @@ enum hawkbit_response hawkbit_probe(void)
	if (hawkbit_results.base.config.polling.sleep) {
		/* Update the sleep time. */
		hawkbit_update_sleep(&hawkbit_results.base);
		LOG_DBG("config.polling.sleep=%s", hawkbit_results.base.config.polling.sleep);
	}

	hawkbit_dump_base(&hawkbit_results.base);

	if (hawkbit_results.base._links.cancelAction.href) {
		ret = hawkbit_find_cancelAction_base(&hawkbit_results.base,
@@ -1172,6 +1167,8 @@ enum hawkbit_response hawkbit_probe(void)
	}

	if (hawkbit_results.base._links.configData.href) {
		LOG_DBG("_links.%s.href=%s", "configData",
			hawkbit_results.base._links.configData.href);
		memset(hb_context.url_buffer, 0, sizeof(hb_context.url_buffer));
		hb_context.dl.http_content_size = 0;
		hb_context.url_buffer_size = URL_BUFFER_SIZE;

@@ -852,6 +852,7 @@ int net_route_mcast_forward_packet(struct net_pkt *pkt,
		if (net_send_data(pkt_cpy) >= 0) {
			++ret;
		} else {
			net_pkt_unref(pkt_cpy);
			--err;
		}
	}

@@ -460,6 +460,11 @@ static ssize_t spair_write(void *obj, const void *buffer, size_t count)
	}

	if (will_block) {
		if (k_is_in_isr()) {
			errno = EAGAIN;
			res = -1;
			goto out;
		}

		for (int signaled = false, result = -1; !signaled;
		     result = -1) {
@@ -646,6 +651,11 @@ static ssize_t spair_read(void *obj, void *buffer, size_t count)
	}

	if (will_block) {
		if (k_is_in_isr()) {
			errno = EAGAIN;
			res = -1;
			goto out;
		}

		for (int signaled = false, result = -1; !signaled;
		     result = -1) {
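
Both hunks above add the same guard: a socketpair operation that would have to block must not be attempted from an ISR, so it fails fast with EAGAIN instead of sleeping. A minimal sketch of the pattern; k_is_in_isr() is stubbed out here and the function name is illustrative:

#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

/* Stand-in for Zephyr's k_is_in_isr(); illustrative only. */
static bool in_isr = true;

/* When the call would block and the caller is in interrupt context,
 * return -1 with errno set to EAGAIN rather than waiting.
 */
static int checked_io(bool will_block)
{
	if (will_block && in_isr) {
		errno = EAGAIN;
		return -1;
	}
	return 0; /* safe to proceed (and possibly block) */
}

int main(void)
{
	int res = checked_io(true);

	printf("res=%d errno=%d\n", res, errno); /* res=-1 errno=EAGAIN */
	return 0;
}
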
@@ -1,15 +1,14 @@
tests:
  arch.x86.info:
    arch_allow: x86
    platform_allow: qemu_x86 ehl_crb up_squared
    harness: console
    harness_config:
      type: one_line
      regex:
        - "info: complete"
    filter: CONFIG_X86_PC_COMPATIBLE
  arch.x86.info.userspace:
    arch_allow: x86
    platform_allow: qemu_x86 ehl_crb up_squared
    extra_configs:
      - CONFIG_TEST_USERSPACE=y
    harness: console
@@ -17,3 +16,4 @@ tests:
      type: one_line
      regex:
        - "info: complete"
    filter: CONFIG_X86_PC_COMPATIBLE

@@ -81,6 +81,28 @@ const struct zcan_frame test_ext_msg_2 = {
	.data = {1, 2, 3, 4, 5, 6, 7, 8}
};

/**
 * @brief Standard (11-bit) CAN ID RTR frame 1.
 */
const struct zcan_frame test_std_rtr_msg_1 = {
	.id_type = CAN_STANDARD_IDENTIFIER,
	.rtr = CAN_REMOTEREQUEST,
	.id = TEST_CAN_STD_ID_1,
	.dlc = 0,
	.data = {0}
};

/**
 * @brief Extended (29-bit) CAN ID RTR frame 1.
 */
const struct zcan_frame test_ext_rtr_msg_1 = {
	.id_type = CAN_EXTENDED_IDENTIFIER,
	.rtr = CAN_REMOTEREQUEST,
	.id = TEST_CAN_EXT_ID_1,
	.dlc = 0,
	.data = {0}
};

const struct zcan_filter test_std_filter_1 = {
	.id_type = CAN_STANDARD_IDENTIFIER,
	.rtr = CAN_DATAFRAME,
@@ -146,6 +168,30 @@ const struct zcan_filter test_ext_masked_filter_2 = {
	.id_mask = TEST_CAN_EXT_MASK
};

/**
 * @brief Standard (11-bit) CAN ID RTR filter 1. This filter matches
 * ``test_std_rtr_msg_1``.
 */
const struct zcan_filter test_std_rtr_filter_1 = {
	.id_type = CAN_STANDARD_IDENTIFIER,
	.rtr = CAN_REMOTEREQUEST,
	.id = TEST_CAN_STD_ID_1,
	.rtr_mask = 1,
	.id_mask = CAN_STD_ID_MASK
};

/**
 * @brief Extended (29-bit) CAN ID RTR filter 1. This filter matches
 * ``test_ext_rtr_msg_1``.
 */
const struct zcan_filter test_ext_rtr_filter_1 = {
	.id_type = CAN_EXTENDED_IDENTIFIER,
	.rtr = CAN_REMOTEREQUEST,
	.id = TEST_CAN_EXT_ID_1,
	.rtr_mask = 1,
	.id_mask = CAN_EXT_ID_MASK
};

const struct zcan_filter test_std_some_filter = {
	.id_type = CAN_STANDARD_IDENTIFIER,
	.rtr = CAN_DATAFRAME,
@@ -394,6 +440,55 @@ static void send_receive(const struct zcan_filter *filter1,
	can_remove_rx_filter(can_dev, filter_id_2);
}

/**
 * @brief Perform a send/receive test with a set of CAN ID filters and CAN frames, RTR and data
 * frames.
 *
 * @param data_filter CAN data filter
 * @param rtr_filter CAN RTR filter
 * @param data_frame CAN data frame
 * @param rtr_frame CAN RTR frame
 */
void send_receive_rtr(const struct zcan_filter *data_filter,
		      const struct zcan_filter *rtr_filter,
		      const struct zcan_frame *data_frame,
		      const struct zcan_frame *rtr_frame)
{
	struct zcan_frame frame;
	int filter_id;
	int err;

	filter_id = add_rx_msgq(can_dev, rtr_filter);

	/* Verify that RTR filter does not match data frame */
	send_test_msg(can_dev, data_frame);
	err = k_msgq_get(&can_msgq, &frame, TEST_RECEIVE_TIMEOUT);
	zassert_equal(err, -EAGAIN, "Data frame passed RTR filter");

	/* Verify that RTR filter matches RTR frame */
	send_test_msg(can_dev, rtr_frame);
	err = k_msgq_get(&can_msgq, &frame, TEST_RECEIVE_TIMEOUT);
	zassert_equal(err, 0, "receive timeout");
	check_msg(&frame, rtr_frame, 0);

	can_remove_rx_filter(can_dev, filter_id);

	filter_id = add_rx_msgq(can_dev, data_filter);

	/* Verify that data filter does not match RTR frame */
	send_test_msg(can_dev, rtr_frame);
	err = k_msgq_get(&can_msgq, &frame, TEST_RECEIVE_TIMEOUT);
	zassert_equal(err, -EAGAIN, "RTR frame passed data filter");

	/* Verify that data filter matches data frame */
	send_test_msg(can_dev, data_frame);
	err = k_msgq_get(&can_msgq, &frame, TEST_RECEIVE_TIMEOUT);
	zassert_equal(err, 0, "receive timeout");
	check_msg(&frame, data_frame, 0);

	can_remove_rx_filter(can_dev, filter_id);
}

/*
 * Set driver to loopback mode
 * The driver stays in loopback mode after that.
@@ -563,6 +658,24 @@ void test_send_receive_buffer(void)
	can_remove_rx_filter(can_dev, filter_id);
}

/**
 * @brief Test send/receive with standard (11-bit) CAN IDs and remote transmission request (RTR).
 */
void test_send_receive_std_id_rtr(void)
{
	send_receive_rtr(&test_std_filter_1, &test_std_rtr_filter_1,
			 &test_std_msg_1, &test_std_rtr_msg_1);
}

/**
 * @brief Test send/receive with extended (29-bit) CAN IDs and remote transmission request (RTR).
 */
void test_send_receive_ext_id_rtr(void)
{
	send_receive_rtr(&test_ext_filter_1, &test_ext_rtr_filter_1,
			 &test_ext_msg_1, &test_ext_rtr_msg_1);
}

/*
 * Add a filter that should not pass the message and send a message
 * with a different id.
@@ -616,6 +729,8 @@ void test_main(void)
		 ztest_unit_test(test_send_receive_ext),
		 ztest_unit_test(test_send_receive_std_masked),
		 ztest_unit_test(test_send_receive_ext_masked),
		 ztest_user_unit_test(test_send_receive_std_id_rtr),
		 ztest_user_unit_test(test_send_receive_ext_id_rtr),
		 ztest_unit_test(test_send_receive_buffer),
		 ztest_unit_test(test_send_receive_wrong_id));
	ztest_run_test_suite(can_driver);

@@ -4,6 +4,7 @@ tests:
    arch_allow: x86
    extra_configs:
      - CONFIG_COUNTER_CMOS=y
    filter: CONFIG_X86_PC_COMPATIBLE

  drivers.counter.mcux.snvs.rtc:
    tags: drivers

@@ -57,6 +57,24 @@ static int test_init(const struct device *dev)

SYS_INIT(test_init, APPLICATION, CONFIG_APPLICATION_INIT_PRIORITY);

/* Check that global static object constructors are called. */
foo_class static_foo(12345678);

static void test_global_static_ctor(void)
{
	zassert_equal(static_foo.get_foo(), 12345678, NULL);
}

/*
 * Check that dynamic memory allocation (usually, the C library heap) is
 * functional when the global static object constructors are called.
 */
foo_class *static_init_dynamic_foo = new foo_class(87654321);

static void test_global_static_ctor_dynmem(void)
{
	zassert_equal(static_init_dynamic_foo->get_foo(), 87654321, NULL);
}

static void test_new_delete(void)
{
@@ -68,6 +86,8 @@ static void test_new_delete(void)
void test_main(void)
{
	ztest_test_suite(cpp_tests,
			 ztest_unit_test(test_global_static_ctor),
			 ztest_unit_test(test_global_static_ctor_dynmem),
			 ztest_unit_test(test_new_delete)
			 );

@@ -1,5 +1,22 @@
tests:
  cpp.main:
common:
  tags: cpp
  integration_platforms:
    - mps2_an385
    tags: cpp
    - qemu_cortex_a53

tests:
  cpp.main.minimal:
    extra_configs:
      - CONFIG_MINIMAL_LIBC=y
  cpp.main.newlib:
    filter: TOOLCHAIN_HAS_NEWLIB == 1
    min_ram: 32
    extra_configs:
      - CONFIG_NEWLIB_LIBC=y
      - CONFIG_NEWLIB_LIBC_NANO=n
  cpp.main.newlib_nano:
    filter: TOOLCHAIN_HAS_NEWLIB == 1 and CONFIG_HAS_NEWLIB_LIBC_NANO
    min_ram: 24
    extra_configs:
      - CONFIG_NEWLIB_LIBC=y
      - CONFIG_NEWLIB_LIBC_NANO=y