diff --git a/.github/workflows/assign_pr_to_project.yml b/.github/workflows/assign_pr_to_project.yml
index 86707fc9da..e61d281c2a 100644
--- a/.github/workflows/assign_pr_to_project.yml
+++ b/.github/workflows/assign_pr_to_project.yml
@@ -1,15 +1,48 @@
 name: 🔸Auto assign pr
 on:
+  workflow_dispatch:
+    inputs:
+      pr_number:
+        type: string
+        description: "Run workflow for this PR number"
+        required: true
+      project_id:
+        type: string
+        description: "Github Project Number"
+        required: true
+        default: "16"
   pull_request:
     types:
       - opened
 
+env:
+  GH_TOKEN: ${{ github.token }}
+
 jobs:
+  get-pr-repo:
+    runs-on: ubuntu-latest
+    outputs:
+      pr_repo_name: ${{ steps.get-repo-name.outputs.repo_name || github.event.pull_request.head.repo.full_name }}
+
+    # INFO `github.event.pull_request.head.repo.full_name` is not available on manual triggered (dispatched) runs
+    steps:
+      - name: Get PR repo name
+        if: ${{ github.event_name == 'workflow_dispatch' }}
+        id: get-repo-name
+        run: |
+          repo_name=$(gh pr view ${{ inputs.pr_number }} --json headRepository,headRepositoryOwner --repo ${{ github.repository }} | jq -r '.headRepositoryOwner.login + "/" + .headRepository.name')
+          echo "repo_name=$repo_name" >> $GITHUB_OUTPUT
+
   auto-assign-pr:
-    uses: ynput/ops-repo-automation/.github/workflows/pr_to_project.yml@develop
+    needs:
+      - get-pr-repo
+    if: ${{ needs.get-pr-repo.outputs.pr_repo_name == github.repository }}
+    uses: ynput/ops-repo-automation/.github/workflows/pr_to_project.yml@main
     with:
       repo: "${{ github.repository }}"
-      project_id: 16
-      pull_request_number: ${{ github.event.pull_request.number }}
+      project_id: ${{ inputs.project_id != '' && fromJSON(inputs.project_id) || 16 }}
+      pull_request_number: ${{ github.event.pull_request.number || fromJSON(inputs.pr_number) }}
     secrets:
-      token: ${{ secrets.YNPUT_BOT_TOKEN }}
+      # INFO fallback to default `github.token` is required for PRs from forks
+      # INFO organization secrets won't be available to forks
+      token: ${{ secrets.YNPUT_BOT_TOKEN || github.token}}
diff --git a/.github/workflows/validate_pr_labels.yml b/.github/workflows/validate_pr_labels.yml
index 00e5742afe..f25e263c98 100644
--- a/.github/workflows/validate_pr_labels.yml
+++ b/.github/workflows/validate_pr_labels.yml
@@ -9,7 +9,7 @@ on:
 
 jobs:
   validate-type-label:
-    uses: ynput/ops-repo-automation/.github/workflows/validate_pr_labels.yml@develop
+    uses: ynput/ops-repo-automation/.github/workflows/validate_pr_labels.yml@main
     with:
       repo: "${{ github.repository }}"
      pull_request_number: ${{ github.event.pull_request.number }}
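The new `get-pr-repo` job exists because `github.event.pull_request.head.repo.full_name` is not populated on `workflow_dispatch` runs, so the head repository is resolved with the `gh` CLI and `auto-assign-pr` only runs when it matches the base repository (organization secrets such as `YNPUT_BOT_TOKEN` are not exposed to forks, hence the `github.token` fallback). A minimal Python sketch of the same fork check against the GitHub REST API; the repository name and the `PR_NUMBER`/`GITHUB_TOKEN` environment variables are illustrative assumptions, not part of this PR:

```python
# Sketch only: mirrors the workflow's fork check using the GitHub REST API
# instead of the `gh` CLI. Values below are assumptions for illustration.
import json
import os
import urllib.request


def pr_is_from_fork(repo: str, pr_number: int, token: str) -> bool:
    """Return True when the PR's head repository differs from the base repo."""
    url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}"
    request = urllib.request.Request(
        url,
        headers={
            "Authorization": f"Bearer {token}",
            "Accept": "application/vnd.github+json",
        },
    )
    with urllib.request.urlopen(request) as response:
        payload = json.load(response)
    head_repo = payload["head"]["repo"]
    # A deleted fork yields a null head repo; treat that as a fork as well.
    return head_repo is None or head_repo["full_name"] != repo


if __name__ == "__main__":
    print(pr_is_from_fork(
        "ynput/ayon-core",
        int(os.environ["PR_NUMBER"]),
        os.environ["GITHUB_TOKEN"],
    ))
```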
diff --git a/client/ayon_core/pipeline/delivery.py b/client/ayon_core/pipeline/delivery.py
index 366c261e08..55c840f3a5 100644
--- a/client/ayon_core/pipeline/delivery.py
+++ b/client/ayon_core/pipeline/delivery.py
@@ -387,7 +387,7 @@ def get_representations_delivery_template_data(
         # convert representation entity. Fixed in 'ayon_api' 1.0.10.
         if isinstance(template_data, str):
             con = ayon_api.get_server_api_connection()
-            repre_entity = con._representation_conversion(repre_entity)
+            con._representation_conversion(repre_entity)
             template_data = repre_entity["context"]
 
         template_data.update(copy.deepcopy(general_template_data))
diff --git a/client/ayon_core/pipeline/publish/__init__.py b/client/ayon_core/pipeline/publish/__init__.py
index ac71239acf..5363e0b378 100644
--- a/client/ayon_core/pipeline/publish/__init__.py
+++ b/client/ayon_core/pipeline/publish/__init__.py
@@ -3,6 +3,7 @@
     ValidateContentsOrder,
     ValidateSceneOrder,
     ValidateMeshOrder,
+    FARM_JOB_ENV_DATA_KEY,
 )
 
 from .publish_plugins import (
@@ -59,6 +60,7 @@
     "ValidateContentsOrder",
     "ValidateSceneOrder",
     "ValidateMeshOrder",
+    "FARM_JOB_ENV_DATA_KEY",
 
     "AbstractMetaInstancePlugin",
     "AbstractMetaContextPlugin",
diff --git a/client/ayon_core/pipeline/publish/constants.py b/client/ayon_core/pipeline/publish/constants.py
index 5240628365..a33e8f9eed 100644
--- a/client/ayon_core/pipeline/publish/constants.py
+++ b/client/ayon_core/pipeline/publish/constants.py
@@ -8,3 +8,5 @@
 
 DEFAULT_PUBLISH_TEMPLATE = "default"
 DEFAULT_HERO_PUBLISH_TEMPLATE = "default"
+
+FARM_JOB_ENV_DATA_KEY: str = "farmJobEnv"
diff --git a/client/ayon_core/plugins/publish/collect_farm_env_variables.py b/client/ayon_core/plugins/publish/collect_farm_env_variables.py
new file mode 100644
index 0000000000..cb52e5c32e
--- /dev/null
+++ b/client/ayon_core/plugins/publish/collect_farm_env_variables.py
@@ -0,0 +1,43 @@
+import os
+
+import pyblish.api
+
+from ayon_core.lib import get_ayon_username
+from ayon_core.pipeline.publish import FARM_JOB_ENV_DATA_KEY
+
+
+class CollectCoreJobEnvVars(pyblish.api.ContextPlugin):
+    """Collect set of environment variables to submit with deadline jobs"""
+    order = pyblish.api.CollectorOrder - 0.45
+    label = "AYON core Farm Environment Variables"
+    targets = ["local"]
+
+    def process(self, context):
+        env = context.data.setdefault(FARM_JOB_ENV_DATA_KEY, {})
+
+        # Disable colored logs on farm
+        for key, value in (
+            ("AYON_LOG_NO_COLORS", "1"),
+            ("AYON_PROJECT_NAME", context.data["projectName"]),
+            ("AYON_FOLDER_PATH", context.data.get("folderPath")),
+            ("AYON_TASK_NAME", context.data.get("task")),
+            # NOTE we should use 'context.data["user"]' but that has higher
+            #   order.
+            ("AYON_USERNAME", get_ayon_username()),
+        ):
+            if value:
+                self.log.debug(f"Setting job env: {key}: {value}")
+                env[key] = value
+
+        for key in [
+            "AYON_BUNDLE_NAME",
+            "AYON_DEFAULT_SETTINGS_VARIANT",
+            "AYON_IN_TESTS",
+            # NOTE Not sure why workdir is needed?
+            "AYON_WORKDIR",
+        ]:
+            value = os.getenv(key)
+            if value:
+                self.log.debug(f"Setting job env: {key}: {value}")
+                env[key] = value
+
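`CollectCoreJobEnvVars` only gathers values into `context.data[FARM_JOB_ENV_DATA_KEY]`; a farm submitter is expected to read that mapping when it builds its job payload. A hypothetical sketch of such a consumer, assuming a Deadline-style `EnvironmentKeyValue<N>` job-info layout; the plugin name and payload shape are illustrative, not part of this PR or of any existing submitter:

```python
# Hypothetical consumer of the collected farm job environment. Only the
# FARM_JOB_ENV_DATA_KEY lookup mirrors this PR; everything else is a sketch.
import pyblish.api

from ayon_core.pipeline.publish import FARM_JOB_ENV_DATA_KEY


class SubmitJobWithCollectedEnv(pyblish.api.ContextPlugin):
    """Example submitter that forwards the collected environment (sketch)."""
    order = pyblish.api.IntegratorOrder
    label = "Submit Job With Collected Env (sketch)"

    def process(self, context):
        # Filled by 'CollectCoreJobEnvVars' (and possibly host/addon plugins).
        job_env = dict(context.data.get(FARM_JOB_ENV_DATA_KEY, {}))

        # Translate into a Deadline-style job info mapping; other render
        # managers would use their own payload structure here.
        job_info = {
            f"EnvironmentKeyValue{index}": f"{key}={value}"
            for index, (key, value) in enumerate(sorted(job_env.items()))
        }
        self.log.debug(f"Job environment payload: {job_info}")
        # ... hand 'job_info' to the actual submission call here ...
```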
+ "AYON_WORKDIR", + ]: + value = os.getenv(key) + if value: + self.log.debug(f"Setting job env: {key}: {value}") + env[key] = value + diff --git a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py index 98723beffa..472694d334 100644 --- a/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py +++ b/client/ayon_core/plugins/publish/extract_otio_audio_tracks.py @@ -71,20 +71,18 @@ def add_audio_to_instances(self, audio_file, instances): name = inst.data["folderPath"] recycling_file = [f for f in created_files if name in f] - - # frameranges - timeline_in_h = inst.data["clipInH"] - timeline_out_h = inst.data["clipOutH"] - fps = inst.data["fps"] - - # create duration - duration = (timeline_out_h - timeline_in_h) + 1 + audio_clip = inst.data["otioClip"] + audio_range = audio_clip.range_in_parent() + duration = audio_range.duration.to_frames() # ffmpeg generate new file only if doesn't exists already if not recycling_file: - # convert to seconds - start_sec = float(timeline_in_h / fps) - duration_sec = float(duration / fps) + parent_track = audio_clip.parent() + parent_track_start = parent_track.range_in_parent().start_time + relative_start_time = ( + audio_range.start_time - parent_track_start) + start_sec = relative_start_time.to_seconds() + duration_sec = audio_range.duration.to_seconds() # temp audio file audio_fpath = self.create_temp_file(name) @@ -163,34 +161,36 @@ def get_audio_track_items(self, otio_timeline): output = [] # go trough all audio tracks - for otio_track in otio_timeline.tracks: - if "Audio" not in otio_track.kind: - continue + for otio_track in otio_timeline.audio_tracks(): self.log.debug("_" * 50) playhead = 0 for otio_clip in otio_track: self.log.debug(otio_clip) - if isinstance(otio_clip, otio.schema.Gap): - playhead += otio_clip.source_range.duration.value - elif isinstance(otio_clip, otio.schema.Clip): - start = otio_clip.source_range.start_time.value - duration = otio_clip.source_range.duration.value - fps = otio_clip.source_range.start_time.rate + if (isinstance(otio_clip, otio.schema.Clip) and + not otio_clip.media_reference.is_missing_reference): + media_av_start = otio_clip.available_range().start_time + clip_start = otio_clip.source_range.start_time + fps = clip_start.rate + conformed_av_start = media_av_start.rescaled_to(fps) + # ffmpeg ignores embedded tc + start = clip_start - conformed_av_start + duration = otio_clip.source_range.duration media_path = otio_clip.media_reference.target_url input = { "mediaPath": media_path, "delayFrame": playhead, - "startFrame": start, - "durationFrame": duration, + "startFrame": start.to_frames(), + "durationFrame": duration.to_frames(), "delayMilSec": int(float(playhead / fps) * 1000), - "startSec": float(start / fps), - "durationSec": float(duration / fps), - "fps": fps + "startSec": start.to_seconds(), + "durationSec": duration.to_seconds(), + "fps": float(fps) } if input not in output: output.append(input) self.log.debug("__ input: {}".format(input)) - playhead += otio_clip.source_range.duration.value + + playhead += otio_clip.source_range.duration.value return output diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py index ab8c9424fa..b2ece45120 100644 --- a/client/ayon_core/version.py +++ b/client/ayon_core/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring AYON addon 'core' version.""" -__version__ = "1.0.9+dev" +__version__ = "1.0.10+dev" diff --git a/package.py b/package.py index 
diff --git a/client/ayon_core/version.py b/client/ayon_core/version.py
index ab8c9424fa..b2ece45120 100644
--- a/client/ayon_core/version.py
+++ b/client/ayon_core/version.py
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring AYON addon 'core' version."""
-__version__ = "1.0.9+dev"
+__version__ = "1.0.10+dev"
diff --git a/package.py b/package.py
index b90db4cde4..58ae5c08d9 100644
--- a/package.py
+++ b/package.py
@@ -1,6 +1,6 @@
 name = "core"
 title = "Core"
-version = "1.0.9+dev"
+version = "1.0.10+dev"
 
 client_dir = "ayon_core"
 
diff --git a/pyproject.toml b/pyproject.toml
index d09fabf8b2..d7cf9fa6ed 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@
 
 [tool.poetry]
 name = "ayon-core"
-version = "1.0.9+dev"
+version = "1.0.10+dev"
 description = ""
 authors = ["Ynput Team "]
 readme = "README.md"