CI improvements #342

Merged · 5 commits · Jun 28, 2022
29 changes: 6 additions & 23 deletions .github/workflows/ci.yaml
@@ -24,6 +24,9 @@ jobs:
        python-version: ["3.7", "3.8", "3.9"]
    steps:
      - uses: actions/checkout@v3
        with:
          # need to fetch all tags to get a correct version
          fetch-depth: 0 # fetch all branches and tags
      - uses: conda-incubator/setup-miniconda@v2
        with:
          channels: conda-forge
@@ -57,29 +60,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: conda-incubator/setup-miniconda@v2
        with:
          channels: conda-forge
          mamba-version: "*"
          activate-environment: cf_xarray_test
          auto-update-conda: false
          python-version: ${{ matrix.python-version }}
      - name: Set up conda environment
        shell: bash -l {0}
        run: |
          mamba env update -f ci/environment-no-optional-deps.yml
          python -m pip install -e .
          conda list
      - name: Run Tests
        shell: bash -l {0}
        run: |
          pytest -n 2

  upstream-dev:
    name: upstream-dev
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          # need to fetch all tags to get a correct version
          fetch-depth: 0 # fetch all branches and tags
      - uses: conda-incubator/setup-miniconda@v2
        with:
          channels: conda-forge
@@ -90,7 +73,7 @@ jobs:
      - name: Set up conda environment
        shell: bash -l {0}
        run: |
          mamba env update -f ci/upstream-dev-env.yml
          mamba env update -f ci/environment-no-optional-deps.yml
          python -m pip install -e .
          conda list
      - name: Run Tests
102 changes: 102 additions & 0 deletions .github/workflows/parse_logs.py
@@ -0,0 +1,102 @@
# type: ignore
import argparse
import functools
import json
import pathlib
import textwrap
from dataclasses import dataclass

from pytest import CollectReport, TestReport


@dataclass
class SessionStart:
    pytest_version: str
    outcome: str = "status"

    @classmethod
    def _from_json(cls, json):
        json_ = json.copy()
        json_.pop("$report_type")
        return cls(**json_)


@dataclass
class SessionFinish:
    exitstatus: str
    outcome: str = "status"

    @classmethod
    def _from_json(cls, json):
        json_ = json.copy()
        json_.pop("$report_type")
        return cls(**json_)


def parse_record(record):
    report_types = {
        "TestReport": TestReport,
        "CollectReport": CollectReport,
        "SessionStart": SessionStart,
        "SessionFinish": SessionFinish,
    }
    cls = report_types.get(record["$report_type"])
    if cls is None:
        raise ValueError(f"unknown report type: {record['$report_type']}")

    return cls._from_json(record)


@functools.singledispatch
def format_summary(report):
    return f"{report.nodeid}: {report}"


@format_summary.register
def _(report: TestReport):
    message = report.longrepr.chain[0][1].message
    return f"{report.nodeid}: {message}"


@format_summary.register
def _(report: CollectReport):
    message = report.longrepr.split("\n")[-1].removeprefix("E").lstrip()
    return f"{report.nodeid}: {message}"


def format_report(reports, py_version):
    newline = "\n"
    summaries = newline.join(format_summary(r) for r in reports)
    message = textwrap.dedent(
        """\
        <details><summary>Python {py_version} Test Summary</summary>

        ```
        {summaries}
        ```

        </details>
        """
    ).format(summaries=summaries, py_version=py_version)
    return message


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("filepath", type=pathlib.Path)
    args = parser.parse_args()

    py_version = args.filepath.stem.split("-")[1]

    print("Parsing logs ...")

    lines = args.filepath.read_text().splitlines()
    reports = [parse_record(json.loads(line)) for line in lines]

    failed = [report for report in reports if report.outcome == "failed"]

    message = format_report(failed, py_version=py_version)

    output_file = pathlib.Path("pytest-logs.txt")
    print(f"Writing output file to: {output_file.absolute()}")
    output_file.write_text(message)
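
For reference, a minimal sketch of how this parser is exercised: each line of a pytest-reportlog file is a JSON record tagged with `$report_type`, and only records whose `outcome` is `"failed"` end up in the rendered summary. The records below are illustrative, and importing the script as a module named `parse_logs` is an assumption — in CI it is only ever run directly on a log file.

```python
# Hypothetical usage sketch; the JSONL records are made up, and importing
# parse_logs as a module is an assumption (CI invokes it as a script).
import json

from parse_logs import format_report, parse_record

lines = [
    '{"$report_type": "SessionStart", "pytest_version": "7.1.2"}',
    '{"$report_type": "SessionFinish", "exitstatus": 1}',
]
reports = [parse_record(json.loads(line)) for line in lines]

# Session records default to outcome="status", so only genuine test/collect
# failures survive this filter.
failed = [report for report in reports if report.outcome == "failed"]
print(format_report(failed, py_version="3.10"))
```

In the real workflow the failed entries are pytest `TestReport`/`CollectReport` objects rebuilt via their own `_from_json`, which is why the script imports them from `pytest`.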
179 changes: 179 additions & 0 deletions .github/workflows/upstream-dev-ci.yaml
@@ -0,0 +1,179 @@
name: CI Upstream
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  schedule:
    - cron: "0 0 * * *" # Daily “At 00:00” UTC
  workflow_dispatch: # allows you to trigger the workflow run manually

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  detect-ci-trigger:
    name: detect upstream-dev ci trigger
    runs-on: ubuntu-latest
    if: |
      github.repository == 'xarray-contrib/cf-xarray'
      && (github.event_name == 'push' || github.event_name == 'pull_request')
    outputs:
      triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 2
      - uses: xarray-contrib/[email protected]
        id: detect-trigger
        with:
          keyword: "[test-upstream]"

  upstream-dev:
    name: upstream-dev
    runs-on: ubuntu-latest
    needs: detect-ci-trigger
    if: |
      always()
      && (
        (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
        || needs.detect-ci-trigger.outputs.triggered == 'true'
      )
    defaults:
      run:
        shell: bash -l {0}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
    outputs:
      artifacts_availability: ${{ steps.status.outputs.ARTIFACTS_AVAILABLE }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags.
      - name: Set up conda environment
        uses: mamba-org/provision-with-micromamba@34071ca7df4983ccd272ed0d3625818b27b70dcc
        with:
          environment-file: ci/upstream-dev-env.yml
          environment-name: cf_xarray_test
          extra-specs: |
            python=${{ matrix.python-version }}
            pytest-reportlog
      - name: Install cf-xarray
        run: |
          python -m pip install --no-deps -e .
      - name: Version info
        run: |
          conda info -a
          conda list
      - name: import cf_xarray
        run: |
          python -c 'import cf_xarray'
      - name: Run Tests
        if: success()
        id: status
        run: |
          python -m pytest -n 2 -rf \
            --report-log output-${{ matrix.python-version }}-log.jsonl \
            || (
              echo '::set-output name=ARTIFACTS_AVAILABLE::true' && false
            )
      - name: Upload artifacts
        if: |
          failure()
          && steps.status.outcome == 'failure'
          && github.event_name == 'schedule'
          && github.repository == 'xarray-contrib/cf-xarray'
        uses: actions/upload-artifact@v3
        with:
          name: output-${{ matrix.python-version }}-log.jsonl
          path: output-${{ matrix.python-version }}-log.jsonl
          retention-days: 5

  report:
    name: report
    needs: upstream-dev
    if: |
      failure()
      && github.event_name == 'schedule'
      && needs.upstream-dev.outputs.artifacts_availability == 'true'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - uses: actions/download-artifact@v3
        with:
          path: /tmp/workspace/logs
      - name: Move all log files into a single directory
        run: |
          rsync -a /tmp/workspace/logs/output-*/ ./logs
          ls -R ./logs
      - name: install dependencies
        run: |
          python -m pip install pytest
      - name: Parse logs
        run: |
          shopt -s globstar
          python .github/workflows/parse_logs.py logs/**/*-log*
          cat pytest-logs.txt
      - name: Report failures
        uses: actions/github-script@v6
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const fs = require('fs');
            const pytest_logs = fs.readFileSync('pytest-logs.txt', 'utf8');
            const title = "⚠️ Nightly upstream-dev CI failed ⚠️"
            const workflow_url = `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`
            const issue_body = `[Workflow Run URL](${workflow_url})\n${pytest_logs}`

            // Run GraphQL query against GitHub API to find the most recent open issue used for reporting failures
            const query = `query($owner:String!, $name:String!, $creator:String!, $label:String!){
              repository(owner: $owner, name: $name) {
                issues(first: 1, states: OPEN, filterBy: {createdBy: $creator, labels: [$label]}, orderBy: {field: CREATED_AT, direction: DESC}) {
                  edges {
                    node {
                      body
                      id
                      number
                    }
                  }
                }
              }
            }`;

            const variables = {
              owner: context.repo.owner,
              name: context.repo.repo,
              label: 'CI',
              creator: "github-actions[bot]"
            }
            const result = await github.graphql(query, variables)

            // If no issue is open, create a new issue,
            // else update the body of the existing issue.
            if (result.repository.issues.edges.length === 0) {
              github.rest.issues.create({
                owner: variables.owner,
                repo: variables.name,
                body: issue_body,
                title: title,
                labels: [variables.label]
              })
            } else {
              github.rest.issues.update({
                owner: variables.owner,
                repo: variables.name,
                issue_number: result.repository.issues.edges[0].node.number,
                body: issue_body
              })
            }