Skip to content

Commit

Permalink
Merge branch 'master' into itikhono/align_friendly_names_uniqueization
Browse files Browse the repository at this point in the history
  • Loading branch information
andrei-kochin authored Mar 22, 2024
2 parents 45dd77c + a47e3f6 commit 1d8a3c3
Show file tree
Hide file tree
Showing 12,445 changed files with 140,588 additions and 111,963 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
2 changes: 1 addition & 1 deletion .github/CODEOWNERS
Validating CODEOWNERS rules …
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@
/tests/layer_tests/tensorflow_tests @openvinotoolkit/openvino-tf-frontend-maintainers
/tests/layer_tests/jax_tests @openvinotoolkit/openvino-tf-frontend-maintainers
/tests/model_hub_tests @openvinotoolkit/openvino-tf-frontend-maintainers
/tests/model_hub_tests/torch_tests @openvinotoolkit/openvino-pytorch-frontend-maintainers
/tests/model_hub_tests/pytorch @openvinotoolkit/openvino-pytorch-frontend-maintainers

# Tools:
/tools/ @openvinotoolkit/openvino-tools-maintainers
Expand Down
1 change: 1 addition & 0 deletions .github/ISSUE_TEMPLATE/good_first_issue.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ body:
value: |
- [Contribution guide - start here!](https://github.com/openvinotoolkit/openvino/blob/master/CONTRIBUTING.md)
- [Intel DevHub Discord channel](https://discord.gg/7pVRxUwdWG) - engage in discussions, ask questions and talk to OpenVINO developers
- [How to link your Pull Request to an issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#manually-linking-a-pull-request-to-an-issue-using-the-pull-request-sidebar)
validations:
required: true

Expand Down
3 changes: 3 additions & 0 deletions .github/components.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,15 @@ CPU:
- TF_FE
- ONNX_FE
build:
- AUTO
- HETERO
- AUTO_BATCH
- TEMPLATE
- IR_FE

GPU:
build:
- AUTO
- HETERO
- AUTO_BATCH
- TEMPLATE
Expand Down Expand Up @@ -149,6 +151,7 @@ PyTorch_FE:
C_API:
build:
- CPU
- GPU
- HETERO
- AUTO_BATCH
- AUTO
Expand Down
3 changes: 3 additions & 0 deletions .github/dependency_review.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,3 +16,6 @@ fail-on-scopes:
- 'unknown'
license-check: true
vulnerability-check: true
allow-dependencies-licenses:
- 'pkg:pypi/[email protected]'
- 'pkg:pypi/psycopg2-binary'
4 changes: 2 additions & 2 deletions .github/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@
- 'tests/layer_tests/tensorflow2_keras_tests/**/*'
- 'tests/layer_tests/jax_tests/**/*'
- any: ['tests/model_hub_tests/**',
'!tests/model_hub_tests/torch_tests/**/*']
'!tests/model_hub_tests/pytorch/**/*']

'category: TFL FE':
- 'src/frontends/tensorflow_lite/**/*'
Expand All @@ -156,7 +156,7 @@
- 'tests/layer_tests/py_frontend_tests/test_torch_decoder.py'
- 'tests/layer_tests/py_frontend_tests/test_torch_frontend.py'
- any: ['tests/model_hub_tests/**',
'!tests/model_hub_tests/tf_hub_tests/**/*']
'!tests/model_hub_tests/tensorflow/**/*']

'category: tools':
- any: ['tools/**',
Expand Down
203 changes: 203 additions & 0 deletions .github/scripts/collect_github_metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,203 @@
#!/usr/bin/env python3

import argparse
import logging
import os

import dateutil
# `import dateutil` alone does not import the `parser` submodule;
# the explicit import below is required for dateutil.parser.parse().
import dateutil.parser
import psycopg2
from github import Github
from psycopg2 import sql

def init_logger():
    """Configure root logging; level is taken from the LOGLEVEL env var (default INFO)."""
    level = os.environ.get('LOGLEVEL', 'INFO').upper()
    logging.basicConfig(
        level=level,
        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        datefmt='%m-%d-%Y %H:%M:%S',
    )

def make_parser():
    """Build and return the CLI argument parser for this script."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-r', '--repository-name', type=str, required=True,
        help='Repository name in OWNER/REPOSITORY format',
    )
    parser.add_argument(
        '--run-id', type=str, required=True,
        help='Workflow Run ID',
    )
    return parser

def create_db_tables(conn, cur):
    """Create the workflow_runs / workflow_jobs / workflow_steps tables
    if they do not exist yet, then commit the DDL.

    `conn` is an open DB connection, `cur` a cursor on it.
    """
    ddl_statements = (
        '''CREATE TABLE IF NOT EXISTS workflow_runs(
        id SERIAL PRIMARY KEY,
        run_id BIGINT,
        html_url TEXT,
        name VARCHAR(255),
        run_started_at TIMESTAMP,
        created_at TIMESTAMP,
        updated_at TIMESTAMP,
        triggering_actor_login VARCHAR(255),
        conclusion VARCHAR(25),
        event VARCHAR(50),
        run_attempt INT,
        repository_full_name VARCHAR(255),
        head_repository_full_name VARCHAR(255),
        head_branch VARCHAR(255),
        status VARCHAR(25),
        display_title TEXT,
        path TEXT,
        total_duration_seconds INT
        );
        ''',
        '''CREATE TABLE IF NOT EXISTS workflow_jobs(
        id SERIAL PRIMARY KEY,
        job_id BIGINT,
        parent_run_id BIGINT,
        html_url TEXT,
        name VARCHAR(255),
        created_at TIMESTAMP,
        started_at TIMESTAMP,
        completed_at TIMESTAMP,
        queued_duration_seconds INT,
        duration_seconds INT,
        runner_name VARCHAR(255),
        status VARCHAR(25),
        conclusion VARCHAR(25),
        head_branch VARCHAR(255),
        run_attempt INT,
        workflow_name TEXT
        );
        ''',
        '''CREATE TABLE IF NOT EXISTS workflow_steps(
        id SERIAL PRIMARY KEY,
        parent_job_id BIGINT,
        name VARCHAR(255),
        conclusion VARCHAR(25),
        number INT,
        started_at TIMESTAMP,
        completed_at TIMESTAMP,
        duration_seconds INT
        );
        ''',
    )
    # One execute per statement; a single commit publishes the whole schema.
    for ddl in ddl_statements:
        cur.execute(ddl)
    conn.commit()

def main():
    """Collect timing metrics for one completed workflow run into Postgres.

    Fetches the workflow run identified by --run-id in the --repository-name
    repository through the GitHub API, and inserts run-, job- and step-level
    timing data into the workflow_runs / workflow_jobs / workflow_steps
    tables.  Database connection settings come from the standard libpq
    environment variables (PGHOST, PGPORT, PGUSER, PGPASSWORD, PGDATABASE).

    Raises:
        ValueError: if the GITHUB_TOKEN environment variable is not set.
        SystemExit: if the run is not in the 'completed' state.
    """
    init_logger()
    parser = make_parser()
    args = parser.parse_args()
    logger = logging.getLogger(__name__)

    github_token = os.environ.get('GITHUB_TOKEN')
    if not github_token:
        raise ValueError('GITHUB_TOKEN environment variable is not set!')

    run_id = args.run_id
    repo_name = args.repository_name

    # this should be specified in runner's env
    db_username = os.environ.get('PGUSER')
    db_password = os.environ.get('PGPASSWORD')
    db_host = os.environ.get('PGHOST')
    db_database = os.environ.get('PGDATABASE')
    db_port = os.environ.get('PGPORT')
    conn = psycopg2.connect(host=db_host,
                            port=db_port,
                            user=db_username,
                            password=db_password,
                            database=db_database)

    # Create tables
    cur = conn.cursor()
    create_db_tables(conn, cur)

    # Get the data
    g = Github(github_token)
    repo = g.get_repo(repo_name)

    run = repo.get_workflow_run(int(run_id))
    if run.status != 'completed':
        logger.error('Run %s is not completed! Only completed runs should be in the database', run_id)
        raise SystemExit(1)

    # We rely on the following assumptions:
    # - The workflow run is completed. When run.status != 'completed' we should not add it to the database
    #   theoretically the second attempt can be triggered right after the completion of the first one
    #   or while the runner which executes this script is deploying
    #
    # - Job's queued duration equals "job.started_at - job.created_at" if started_at > created_at.
    #   Otherwise the job should not be added to the database
    total_duration_seconds = round(run.timing().run_duration_ms / 1000)

    # SECURITY: run/job/step names, branch names and display titles are
    # user-controlled text coming from GitHub.  They are passed as query
    # parameters (never interpolated into the SQL string) to avoid SQL
    # injection and breakage on embedded quotes.
    workflow_data_query = '''INSERT INTO workflow_runs(
    run_id, html_url, name,
    run_started_at, created_at, updated_at, triggering_actor_login, conclusion,
    event, run_attempt, repository_full_name,
    head_branch, display_title, path, total_duration_seconds)
    VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
    '''
    workflow_data = (run_id, run.html_url, run.name, run.run_started_at,
                     run.created_at, run.updated_at,
                     run.raw_data['triggering_actor']['login'],
                     run.conclusion, run.event,
                     run.run_attempt, run.raw_data['repository']['full_name'],
                     run.head_branch, run.display_title, run.path,
                     total_duration_seconds)

    logger.debug('Workflow run query: %s', workflow_data_query)
    cur.execute(workflow_data_query, workflow_data)

    for job in run.jobs():
        job_id = job.id

        # 'created_at' arrives as an ISO-8601 string in raw_data; parse it to
        # compare against the datetime attributes PyGithub exposes.
        job_created_at_date = dateutil.parser.parse(job.raw_data['created_at'])
        if job_created_at_date > job.started_at:
            logger.warning('Skipping job %s of run %s - most likely a stub \
            job created after workflow restart', job.name, run_id)
            continue

        queued_duration_timedelta = job.started_at - job_created_at_date
        queued_duration_seconds = round(queued_duration_timedelta.total_seconds())

        duration_timedelta = job.completed_at - job.started_at
        duration_seconds = round(duration_timedelta.total_seconds())

        job_data_query = '''
        INSERT INTO workflow_jobs(
            job_id, parent_run_id, html_url, name,
            created_at, started_at, completed_at,
            queued_duration_seconds, duration_seconds,
            runner_name, status, conclusion, head_branch,
            run_attempt, workflow_name
        )
        VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
        '''
        job_data = (job_id, run_id, job.html_url, job.name,
                    job.raw_data['created_at'], job.started_at, job.completed_at,
                    queued_duration_seconds, duration_seconds,
                    job.raw_data['runner_name'], job.status, job.conclusion,
                    job.raw_data['head_branch'], job.raw_data['run_attempt'],
                    job.raw_data['workflow_name'])
        logger.debug('Job query: %s', job_data_query)
        cur.execute(job_data_query, job_data)

        for step in job.steps:
            duration_seconds_timedelta = step.completed_at - step.started_at
            step_duration_seconds = round(duration_seconds_timedelta.total_seconds())

            step_data_query = '''
            INSERT INTO workflow_steps(
                parent_job_id, name, conclusion,
                number, started_at, completed_at,
                duration_seconds)
            VALUES(%s, %s, %s, %s, %s, %s, %s);
            '''
            step_data = (job_id, step.name, step.conclusion,
                         step.number, step.started_at, step.completed_at,
                         step_duration_seconds)
            logger.debug('Step query: %s', step_data_query)
            cur.execute(step_data_query, step_data)

    conn.commit()
    cur.close()
    conn.close()
    g.close()


if __name__ == "__main__":
    main()
1 change: 1 addition & 0 deletions .github/scripts/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
python-dateutil==2.9.0.post0
2 changes: 1 addition & 1 deletion .github/workflows/android_arm64.yml
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,7 @@ jobs:
# Upload build logs
#
- name: Upload build logs
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: always()
with:
name: build_logs
Expand Down
10 changes: 5 additions & 5 deletions .github/workflows/build_doc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ jobs:
lfs: 'true'

- name: Install apt-get dependencies
uses: awalsh128/[email protected].1
uses: awalsh128/[email protected].2
with:
packages: graphviz texlive liblua5.2-0 libclang1-9 libclang-cpp9
version: 3.0
Expand Down Expand Up @@ -69,19 +69,19 @@ jobs:
echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
- name: 'Upload doxygen.log'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: doxygen_build_log_${{ env.PR_NUMBER }}.log
path: build/docs/doxygen.log

- name: 'Upload sphinx.log'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: sphinx_build_log_${{ env.PR_NUMBER }}.log
path: build/docs/sphinx.log

- name: 'Upload docs html'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: openvino_docs_html_${{ env.PR_NUMBER }}.zip
path: build/docs/openvino_docs_html.zip
Expand All @@ -100,7 +100,7 @@ jobs:
- name: 'Upload test results'
if: failure()
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: openvino_docs_pytest
path: build/docs/_artifacts/
8 changes: 2 additions & 6 deletions .github/workflows/code_snippets.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ jobs:
submodules: 'true'

- name: Install OpenCL
uses: awalsh128/[email protected].1
uses: awalsh128/[email protected].2
if: runner.os == 'Linux'
with:
packages: ocl-icd-opencl-dev opencl-headers
Expand All @@ -39,9 +39,5 @@ jobs:
- name: CMake configure
run: cmake -DCMAKE_BUILD_TYPE=Release -DTHREADING=SEQ -B build

- name: Get number of CPU cores
uses: SimenB/github-actions-cpu-cores@v2
id: cpu-cores

- name: Build snippets
run: cmake --build build --target openvino_docs_snippets --parallel ${{ steps.cpu-cores.outputs.count }}
run: cmake --build build --target openvino_docs_snippets --parallel
5 changes: 0 additions & 5 deletions .github/workflows/code_style.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,6 @@ jobs:
sudo apt update
sudo apt --assume-yes install clang-format-9
- name: Install dependencies
run: |
python3 -m pip install --upgrade pip
python3 -m pip install -r ./src/bindings/python/requirements.txt
# Run cmake with -DENABLE_PROFILING_ITT=ON -DSELECTIVE_BUILD=COLLECT in order to enable codestyle check for ITT collector
- name: CMake configure
run: cmake -DENABLE_PYTHON=ON -DENABLE_TESTS=ON -DENABLE_PROFILING_ITT=ON -DSELECTIVE_BUILD=COLLECT -B build
Expand Down
7 changes: 0 additions & 7 deletions .github/workflows/coverage.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ jobs:
python3 -m pip install --upgrade pip
python3 -m pip install -r ${{ github.workspace }}/src/bindings/python/wheel/requirements-dev.txt
python3 -m pip install -r ${{ github.workspace }}/src/bindings/python/requirements.txt
# For running Paddle frontend unit tests
python3 -m pip install -r ${{ github.workspace }}/src/frontends/paddle/tests/requirements.txt
# For running ONNX frontend unit tests
Expand All @@ -55,10 +54,6 @@ jobs:
python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_tf2.txt
python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_dev.txt
- name: Get number of CPU cores
uses: SimenB/github-actions-cpu-cores@v2
id: cpu-cores

- name: Build OpenVINO with CMake
uses: ashutoshvarma/action-cmake-build@master
with:
Expand All @@ -82,7 +77,6 @@ jobs:
-DCMAKE_CXX_LINKER_LAUNCHER=ccache
-DENABLE_SYSTEM_SNAPPY=ON
build-type: Release
parallel: ${{ steps.cpu-cores.outputs.count }}

- name: Install wheel packages
run: cmake -DCOMPONENT=python_wheels -DCMAKE_INSTALL_PREFIX=${{ github.workspace }}/install_pkg -P '${{ github.workspace }}/build/cmake_install.cmake'
Expand Down Expand Up @@ -130,7 +124,6 @@ jobs:
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache
-DCMAKE_C_LINKER_LAUNCHER=ccache
-DCMAKE_CXX_LINKER_LAUNCHER=ccache
parallel: ${{ steps.cpu-cores.outputs.count }}
- name: Print info
Expand Down
Loading

0 comments on commit 1d8a3c3

Please sign in to comment.