Compare commits

...

9 Commits

Author SHA1 Message Date
MyNey          50e9b8d24d  Merge fb127cccd2 into b83ca24eb7  2024-11-10 20:10:17 -05:00
sepro          b83ca24eb7  [core] Catch broken Cryptodome installations (#11486)  2024-11-10 00:53:49 +01:00
                           Authored by: seproDev
bashonly       240a7d43c8  [build] Pin websockets version to >=13.0,<14 (#11488)  2024-11-09 23:46:47 +00:00
                           websockets 14.0 causes CI test failures (a lot more of them)
                           Authored by: bashonly
bashonly       f13df591d4  [build] Enable attestations for trusted publishing (#11420)  2024-11-09 23:26:02 +00:00
                           Reverts 428ffb75aa
                           Authored by: bashonly
MinePlayersPE  fb127cccd2  Merge branch 'yt-dlp:master' into vidio-live  2021-12-29 13:55:11 +07:00
MinePlayersPE  50306a11eb  linter  2021-08-24 09:19:32 +07:00
MinePlayersPE  deab7eb786  Update vidio.py  2021-08-24 09:18:27 +07:00
MinePlayersPE  a70f889927  Merge branch 'yt-dlp:master' into vidio-live  2021-08-24 07:54:05 +07:00
MinePlayersPE  a746212d7b  Add live dash support  2021-08-24 07:42:22 +07:00
7 changed files with 72 additions and 11 deletions

View File

@@ -504,7 +504,8 @@ jobs:
       - windows32
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact@v4
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
         with:
           path: artifact
           pattern: build-bin-*

View File

@@ -28,3 +28,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.MASTER_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

View File

@@ -41,3 +41,20 @@ jobs:
       actions: write # For cleaning up cache
       id-token: write # mandatory for trusted publishing
     secrets: inherit
+
+  publish_pypi:
+    needs: [release]
+    if: vars.NIGHTLY_PYPI_PROJECT != ''
+    runs-on: ubuntu-latest
+    permissions:
+      id-token: write # mandatory for trusted publishing
+    steps:
+      - name: Download artifacts
+        uses: actions/download-artifact@v4
+        with:
+          path: dist
+          name: build-pypi
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          verbose: true

View File

@@ -2,10 +2,6 @@ name: Release
 on:
   workflow_call:
     inputs:
-      prerelease:
-        required: false
-        default: true
-        type: boolean
       source:
         required: false
         default: ''
@@ -18,6 +14,10 @@
         required: false
         default: ''
         type: string
+      prerelease:
+        required: false
+        default: true
+        type: boolean
   workflow_dispatch:
     inputs:
       source:
@@ -278,11 +278,20 @@
           make clean-cache
           python -m build --no-isolation .

+      - name: Upload artifacts
+        if: github.event_name != 'workflow_dispatch'
+        uses: actions/upload-artifact@v4
+        with:
+          name: build-pypi
+          path: |
+            dist/*
+          compression-level: 0
+
       - name: Publish to PyPI
+        if: github.event_name == 'workflow_dispatch'
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
           verbose: true
+          attestations: false # Currently doesn't work w/ reusable workflows (breaks nightly)

   publish:
     needs: [prepare, build]

View File

@@ -52,7 +52,7 @@ default = [
     "pycryptodomex",
     "requests>=2.32.2,<3",
     "urllib3>=1.26.17,<3",
-    "websockets>=13.0",
+    "websockets>=13.0,<14",
 ]
 curl-cffi = [
     "curl-cffi==0.5.10; os_name=='nt' and implementation_name=='cpython'",

View File

@@ -24,7 +24,7 @@ try:
     from Crypto.Cipher import AES, PKCS1_OAEP, Blowfish, PKCS1_v1_5  # noqa: F401
     from Crypto.Hash import CMAC, SHA1  # noqa: F401
     from Crypto.PublicKey import RSA  # noqa: F401
-except ImportError:
+except (ImportError, OSError):
     __version__ = f'broken {__version__}'.strip()
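
The point of the wider except clause above is that a broken Cryptodome installation can fail at import time with an OSError (for example when a compiled extension cannot load its shared library), not only with an ImportError. A minimal sketch of the same guarded-import pattern, using illustrative names rather than yt-dlp's actual module layout:

    try:
        # Importing a package with compiled extensions can raise OSError,
        # not just ImportError, when the installation is corrupted.
        from Cryptodome.Cipher import AES  # noqa: F401
    except (ImportError, OSError):
        AES = None  # mark the dependency as unusable instead of crashing

    if AES is None:
        print('Cryptodome is missing or broken; a caller could fall back to a pure-Python AES')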

View File

@@ -1,4 +1,5 @@
 from .common import InfoExtractor
+from ..compat import compat_urllib_parse_urlparse
 from ..utils import (
     ExtractorError,
     clean_html,
@@ -276,8 +277,16 @@ class VidioLiveIE(VidioBaseIE):
                     display_id, note='Downloading HLS token JSON', data=b'')
                 formats.extend(self._extract_m3u8_formats(
                     sources['source'] + '?' + token_json.get('token', ''), display_id, 'mp4', 'm3u8_native'))
-            if str_or_none(sources.get('source_dash')):
-                pass
+            if str_or_none(sources.get('source_dash')):  # TODO: Find live example with source_dash
+                parsed_base_dash = compat_urllib_parse_urlparse(sources['source_dash'])
+                token_json = self._download_json(
+                    'https://www.vidio.com/live/%s/tokens?type=dash' % video_id,
+                    display_id, note='Downloading DASH token JSON', data=b'')
+                parsed_tokenized_dash = parsed_base_dash._replace(path=token_json.get('token', '')
+                                                                  + (parsed_base_dash.path if parsed_base_dash.path[0] == '/'
+                                                                     else '/' + parsed_base_dash.path))
+                formats.extend(self._extract_mpd_formats(
+                    parsed_tokenized_dash.geturl(), display_id, 'dash'))
         else:
             if stream_meta.get('stream_token_url'):
                 token_json = self._download_json(
@@ -287,7 +296,15 @@ class VidioLiveIE(VidioBaseIE):
                     stream_meta['stream_token_url'] + '?' + token_json.get('token', ''),
                     display_id, 'mp4', 'm3u8_native'))
             if stream_meta.get('stream_dash_url'):
-                pass
+                parsed_base_dash = compat_urllib_parse_urlparse(stream_meta['stream_dash_url'])
+                token_json = self._download_json(
+                    'https://www.vidio.com/live/%s/tokens?type=dash' % video_id,
+                    display_id, note='Downloading DASH token JSON', data=b'')
+                parsed_tokenized_dash = parsed_base_dash._replace(path=token_json.get('token', '')
+                                                                  + (parsed_base_dash.path if parsed_base_dash.path[0] == '/'
+                                                                     else '/' + parsed_base_dash.path))
+                formats.extend(self._extract_mpd_formats(
+                    parsed_tokenized_dash.geturl(), display_id, 'dash'))
             if stream_meta.get('stream_url'):
                 formats.extend(self._extract_m3u8_formats(
                     stream_meta['stream_url'], display_id, 'mp4', 'm3u8_native'))
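
The DASH branches above splice the playback token into the URL path rather than the query string. A standalone sketch of that rewrite using only the standard library, with made-up example values standing in for Vidio's API responses:

    from urllib.parse import urlparse

    base_dash = 'https://example.com/live/stream.mpd'  # e.g. sources['source_dash']
    token = '/abc123/def456'                           # e.g. token_json['token']

    parsed = urlparse(base_dash)
    # Ensure exactly one '/' between the token and the original path,
    # then prepend the token to the path component.
    path = parsed.path if parsed.path.startswith('/') else '/' + parsed.path
    tokenized = parsed._replace(path=token + path)

    print(tokenized.geturl())  # https://example.com/abc123/def456/live/stream.mpd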