Compare commits

...

38 Commits

| SHA1 | Message | Build docker container / build (push) | Date |
|------|---------|---------------------------------------|------|
| 31eb8abddf | update readme | Successful in 4m46s | 2024-11-26 13:43:12 +01:00 |
| 6acb07ed4e | update readme and fix | | 2024-11-26 13:41:02 +01:00 |
| 02133f0692 | update custom fix | Successful in 4m26s | 2024-11-26 13:03:55 +01:00 |
| 6543f2ce6c | update readme + add option to customize api endpoint | Successful in 4m43s | 2024-11-26 12:27:04 +01:00 |
| e7b49898df | fixes movistar plus | Successful in 4m32s | 2024-11-22 20:49:52 +01:00 |
| 99c7d0e2c6 | change custom fix for movistarplus.es | Successful in 4m58s | 2024-11-22 15:46:57 +01:00 |
| a46995b833 | update readme for fixes autor attributions | | 2024-11-15 13:37:30 +01:00 |
| 1a73bc2b25 | add custom fix and credit fix authors properly | Successful in 6m20s | 2024-11-14 11:29:24 +01:00 |
| f15b5c36da | add funding file for github | | 2024-11-12 16:21:16 +01:00 |
| ac6aed6aa4 | update pickx.be provider | Successful in 8m39s | 2024-11-08 13:06:32 +01:00 |
| c0905a30f0 | add license | | 2024-10-23 08:53:38 +02:00 |
| 8555fae94a | update readme | | 2024-10-22 12:16:34 +02:00 |
| a4575ccf18 | update readme | Successful in 7m32s | 2024-10-15 08:50:59 +02:00 |
| abc9f19ac3 | update readme | | 2024-10-15 08:50:20 +02:00 |
| 5e40ec15af | update readme | | 2024-10-15 08:48:54 +02:00 |
| aa9ddceb06 | update readme | | 2024-10-15 08:42:58 +02:00 |
| 8f58693ef5 | update readme | Successful in 8m53s | 2024-10-14 16:55:04 +02:00 |
| fd12d6508f | update readme | Successful in 8m35s | 2024-10-14 08:31:08 +02:00 |
| b078fd94ae | update readme | Successful in 7m12s | 2024-10-10 08:57:00 +02:00 |
| 30fdcd055d | update readme | | 2024-10-09 13:59:34 +02:00 |
| 14526d29e2 | update readme | | 2024-10-09 13:58:35 +02:00 |
| 7dd9fab7e6 | finalize publish action | Successful in 7m2s | 2024-10-09 13:25:50 +02:00 |
| 65c94b6585 | test publish to docker registry | Successful in 6m1s | 2024-10-09 13:17:15 +02:00 |
| c95f6abf4f | testing github action | Failing after 5m57s | 2024-10-09 13:06:26 +02:00 |
| 26db08b189 | update action | Failing after 7m3s | 2024-10-09 12:38:57 +02:00 |
| 57501829c1 | image improvements + documentation update | Has been cancelled | 2024-10-09 10:46:08 +02:00 |
| a7e52a2952 | update readme | Failing after 7m2s | 2024-10-07 13:12:36 +02:00 |
| 2da2e0e4ab | edit readme | Successful in 7m14s | 2024-10-04 17:13:48 +02:00 |
| f55db2a2f2 | edit readme | | 2024-10-04 17:13:17 +02:00 |
| 9dbc13ee24 | update readme | | 2024-10-04 17:10:44 +02:00 |
| bf767c9878 | update readme | | 2024-10-04 17:09:49 +02:00 |
| 12b1343bd4 | update readme | | 2024-10-04 17:01:13 +02:00 |
| 0e70d67740 | edit readme | | 2024-10-04 17:00:06 +02:00 |
| 6e16e71ab5 | add custom fixes | | 2024-10-04 16:58:16 +02:00 |
| f66ad104af | update readme | Successful in 12m47s | 2024-10-02 08:33:55 +02:00 |
| df87aec731 | update readme | | 2024-09-30 08:36:15 +02:00 |
| 6c8efd06c6 | update versions in readme | Successful in 10m33s | 2024-09-16 08:40:42 +02:00 |
| d197c843ee | update versions in readme | Successful in 8m18s | 2024-09-09 14:15:37 +02:00 |
10 changed files with 766 additions and 18 deletions

View File

@@ -11,7 +11,7 @@ jobs:
         working-directory: ${{ GITHUB_WORKSPACE }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install Docker
         run: |
           echo "Checking docker installation"
@@ -23,17 +23,24 @@ jobs:
           fi
       - name: Set up Docker Buildx
        uses: https://github.com/docker/setup-buildx-action@v3
-      - name: Docker login
+      - name: Login to Gitea container registry
        uses: https://github.com/docker/login-action@v3
        with:
          registry: git.claeyscloud.com
          username: nologin
          password: ${{ secrets.PACKAGE_TOKEN }}
+      - name: Login to DockerHub container registry
+        uses: https://github.com/docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_HUB_USERNAME}}
+          password: ${{ secrets.DOCKER_HUB_PASSWORD }}
       - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
-          images: git.claeyscloud.com/david/epg-info
+          images: |
+            davidquinonescl/epg-info
+            git.claeyscloud.com/david/epg-info
          tags: |
            type=semver,pattern={{raw}}
            type=sha

.github/FUNDING.yml (vendored, new file, 1 line added)
View File

@@ -0,0 +1 @@
github: davidclaeysquinones

View File

@@ -2,25 +2,27 @@ FROM node:21-alpine
 ARG GIT_REPO=https://github.com/iptv-org/epg.git
 ARG GIT_BRANCH=master
 ENV CRON_SCHEDULE="0 0,12 * * *"
+ENV API_URL="https://iptv-org.github.io/api"
 ENV DAYS=14
 ENV MAX_CONNECTIONS=10
+ENV ENABLE_FIXES=false
 ARG BIN_FOLDER=/bin
 ARG EPG_FOLDER=epg
+ARG FIXES_FOLDER_ARG=fixes
 ARG START_SCRIPT_ARG=$BIN_FOLDER/$EPG_FOLDER/start.sh
 ENV WORKDIR=${BIN_FOLDER}/${EPG_FOLDER}
+ENV FIXES_FOLDER=$FIXES_FOLDER_ARG
 ENV START_SCRIPT=$START_SCRIPT_ARG
 COPY channels.xml /config/channels.xml
+ADD $FIXES_FOLDER /fixes
 RUN apk update \
   && apk upgrade --available \
-  && apk add curl \
-  && apk add git \
-  && apk add tzdata \
-  && apk add bash \
+  && apk add curl git tzdata bash \
   && npm install -g npm@latest \
   && npm install pm2 -g \
   && mkdir $(echo "${BIN_FOLDER}/${EPG_FOLDER}") -p \
   && git -C $(echo "${BIN_FOLDER}") clone --depth 1 -b $(echo "${GIT_BRANCH} ${GIT_REPO}") \
-  && cd $WORKDIR cat && npm install && npm update \
+  && cd $WORKDIR && npm install && npm update \
   && rm .eslintrc.json \
   && rm -rf .github \
   && rm -rf .git \
@@ -33,6 +35,13 @@ RUN apk update \
   && rm sites/**/readme.md \
   && rm -rf sites/**/__data__ \
   && rm sites/**/**.test.js \
+  && rm -rf node_modules/**/.package-lock.json \
+  && rm -rf node_modules/**/.tsconfig.json \
+  && rm -rf node_modules/**/.tsconfig.tsbuildinfo.json \
+  && rm -rf node_modules/**/.github \
+  && rm -rf node_modules/**/docs \
+  && rm -rf node_modules/**/LICENSE \
+  && rm -rf node_modules/**/**.md \
   && ln -s /config/channels.xml $(echo "${WORKDIR}/channels.xml") \
   && mkdir /public
 COPY start.sh $WORKDIR
@@ -40,5 +49,6 @@ COPY serve.json $WORKDIR
 RUN chmod +x "$START_SCRIPT" \
   && apk del git curl \
   && rm -rf /var/cache/apk/*
-ENTRYPOINT bash $START_SCRIPT chron-schedule="$CRON_SCHEDULE" work-dir="$WORKDIR" days="$DAYS" max_connections="$MAX_CONNECTIONS"
+SHELL ["/bin/bash", "-c"]
+ENTRYPOINT bash $START_SCRIPT chron-schedule="$CRON_SCHEDULE" work-dir="$WORKDIR" days="$DAYS" max_connections="$MAX_CONNECTIONS" enable_fixes="$ENABLE_FIXES" api_url="$API_URL"
 EXPOSE 3000

LICENSE (new file, 9 lines added)
View File

@@ -0,0 +1,9 @@
MIT License
Copyright (c) 2024 David Claeys
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -17,6 +17,7 @@ The `pm2` and `serve` packages are used in order to run the application in the c
 ### Paths
+#### Channels file
 An example `channels.xml` is included by default in the image.<br>
 ```xml
 <?xml version="1.0" encoding="UTF-8"?>
@@ -28,13 +29,35 @@ An example `channels.xml` is included by default in the image.<br>
 However if you want to configure your own channels you need to provide your own configuration file.<br>
 You can do this by creating a mapping in the `/config` folder.
+#### Custom fixes
+Through the `ENABLE_FIXES` variable custom provider fixes can be applied to the container.
+By default some fixes are available. These fixes have been validated before being added to this repo.
+However this option is disabled by default since you might only want to run the unmodified source.
+If you have suggestions or a problem with them please submit an issue.
+This the list of the provided custom fixes :
+| Provider | Author(s) | Status |
+|------------------|----------------------------------------------------------------|-------------------------------------------------------------------|
+| movistarplus.es | [davidclaeysquinones](https://github.com/davidclaeysquinones) | [PR](https://github.com/iptv-org/epg/pull/2440) pending approval |
+| pickx.be | [davidclaeysquinones](https://github.com/davidclaeysquinones) and [BellezaEmporium](https://github.com/BellezaEmporium) | [PR](https://github.com/iptv-org/epg/pull/2480) pending approval |
+| telenet.tv | [davidclaeysquinones](https://github.com/davidclaeysquinones) | [PR](https://github.com/iptv-org/epg/pull/2429) merged since commit [fd382db](https://github.com/iptv-org/epg/commit/fd382db08da7a96150928b8dcfef115e29e661d3) |
+| web.magentatv.de | [klausellus-wallace](https://github.com/klausellus-wallace) | [PR](https://github.com/iptv-org/epg/pull/2458) pending approval |
+If for some reason you want to include your own provider fixes this is possible by creation a mapping in the `/fixes` folder.<br>
+The expected structure is */fixes/`provider_name`/`provider_name`.config.js*.<br>
+It is recommended that you take existing provider code as a base for your customisations.
 ### Environment Variables
 | Variable | Description | Default |
-|-------------------------------|----------------------------------------------------------------------------|------------------|
+|-------------------------------|----------------------------------------------------------------------------|----------------------------------|
 | CRON_SCHEDULE | CRON expression describing the recurrence for epg retrieval. | `0 0,12 * * *` |
 | DAYS | Describes the desired amount of days in the future for for epg retrieval. | 14 |
 | MAX_CONNECTIONS | The maximum amount of parallel connections that can be established | 10 |
+| ENABLE_FIXES | Some fixes to providers take a long time to be merged into the main branch.<br>When this option is enabled some of these fixes will also be included.<br>The source code for these fixes can be seen under the `fixes` folder.<br> Recreate the container when changing this variable in order for it to take effect | false |
+| API_URL | The endpoint where channel information will be grabbed | `https://iptv-org.github.io/api` |
 ### Compose file
@@ -43,7 +66,8 @@ version: '3.3'
 services:
   epg:
     image: git.claeyscloud.com/david/epg-info:latest
-    #image: image: git.claeyscloud.com/david/epg-info:latest:latest
+    #image: ghcr.io/davidclaeysquinones/epg-info:latest
+    #image: davidquinonescl/epg-info:latest
     volumes:
       # add a mapping in order to add the channels file
       - /docker/epg:/config
@@ -52,12 +76,52 @@ services:
     environment:
       # specify the time zone for the server
       - TZ=Etc/UTC
+      # uncomment the underlying line if you want to enable custom fixes
+      #- ENABLE_FIXES=true
     restart: unless-stopped
 ```
 ### Versions
-- 1.0.0
+This image is bound to the content of the [iptv-org/epg](https://github.com/iptv-org/epg) repository. In the underlying list you can see to which commit each version of the docker image is bound.
+Normally when a change is made in the source repository the documentation is updated and a new tag is created in this repository. This is completely normal since the source repository is only cloned during the build process of the docker image.
+Sometimes a new version of this image will be bound to the same source commit. This will happen when improvements are made to the image.
+- 1.0.0 &nbsp;
 [08-01-2024](https://github.com/iptv-org/epg/commit/793c74ca397504fc2afc8fbfa998e0b8e4ca45d9)
-- 1.0.1
+- 1.0.1 &nbsp;
 [08-14-2024](https://github.com/iptv-org/epg/commit/270e85cfae6f0f691c2e6ab7ce511d60fd687565)
+- 1.0.2 &nbsp;
+[09-07-2024](https://github.com/iptv-org/epg/commit/4e3b06a86e225cdd1b9362a683e6770fb68ff28f)
+- 1.0.3 &nbsp;
+[09-14-2024](https://github.com/iptv-org/epg/commit/c69f3c93b1123ddf0fecc62c7067fced59ae4e99)
+- 1.0.4 &nbsp;
+[09-30-2024](https://github.com/iptv-org/epg/commit/d90c7a54b941238cb92391b33d80a75e746d3002)
+- 1.0.5 &nbsp;
+[10-02-2024](https://github.com/iptv-org/epg/commit/713dbf60a1cb9623ffcab6ab370ee9a78b32102b)
+- 1.0.6 &nbsp;
+[10-02-2024](https://github.com/iptv-org/epg/commit/713dbf60a1cb9623ffcab6ab370ee9a78b32102b)<br>Adds possibility to enable custom fixes
+- 1.0.7 &nbsp;
+[10-02-2024](https://github.com/iptv-org/epg/commit/713dbf60a1cb9623ffcab6ab370ee9a78b32102b)<br>Adds improvement to the docker image size
+- 1.0.8 &nbsp;
+[10-10-2024](https://github.com/iptv-org/epg/commit/2241bc261fd37b8b16e036a0b61167030a5ce2e6)
+- 1.0.9 &nbsp;
+[10-12-2024](https://github.com/iptv-org/epg/commit/fd382db08da7a96150928b8dcfef115e29e661d3)
+- 1.0.10
+[10-14-2024 12:50](https://github.com/iptv-org/epg/commit/a3e7661f95103cbee4bcb78bd483396680e9abfc)
+- 1.0.11
+[10-14-2024 17:34](https://github.com/iptv-org/epg/commit/7610f7b9f5cc1ccab8d17f3408a95d31b36ace7c)
+- 1.0.12
+[10-14-2024](https://github.com/iptv-org/epg/commit/7610f7b9f5cc1ccab8d17f3408a95d31b36ace7c)<br>Fix Pickx.be url
+- 1.0.13
+[10-14-2024](https://github.com/iptv-org/epg/commit/7610f7b9f5cc1ccab8d17f3408a95d31b36ace7c)<br>Add custom fix for web.magentatv.de
+- 1.0.14
+[10-14-2024](https://github.com/iptv-org/epg/commit/7610f7b9f5cc1ccab8d17f3408a95d31b36ace7c)<br>Change fix for movistarplus.es in order to work with new API
+- 1.0.15
+[11-26-2024](https://github.com/iptv-org/epg/commit/d15911006e163262c0c7f267deae28160c0d7a8f)<br>Add option to customize channel endpoint
+- 1.0.16
+[11-26-2024](https://github.com/iptv-org/epg/commit/d15911006e163262c0c7f267deae28160c0d7a8f)<br>Fix icons for movistarplus.es
+- 1.0.17
+[11-26-2024](https://github.com/iptv-org/epg/commit/d15911006e163262c0c7f267deae28160c0d7a8f)<br>Update fix for pickx.be
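
The custom fixes documented in the README above live in `/fixes/<provider_name>/<provider_name>.config.js` and follow the same shape as the provider configs added later in this compare (a module exporting `site`, `days`, `url` and `parser`). A minimal sketch of such a file, assuming a hypothetical provider `example.tv` with a made-up JSON endpoint; copy one of the real configs in this compare as a starting point rather than this skeleton:

```js
// Hypothetical /fixes/example.tv/example.tv.config.js - the provider name,
// endpoint and response fields are placeholders, not a real site.
const dayjs = require('dayjs')

module.exports = {
  site: 'example.tv', // must match the folder and file name under /fixes
  days: 2,            // how many days ahead the provider can serve
  url({ channel, date }) {
    // one request per channel per day
    return `https://example.tv/api/epg/${channel.site_id}/${date.format('YYYY-MM-DD')}`
  },
  parser({ content }) {
    // map the raw response onto the program objects the grabber expects
    return JSON.parse(content).map(item => ({
      title: item.title,
      description: item.description,
      start: dayjs(item.start),
      stop: dayjs(item.stop)
    }))
  }
}
```

With `ENABLE_FIXES=true` the start script copies everything under `/fixes` into the cloned repository's `sites/` folder (see the `start.sh` diff at the end of this compare), so a file mounted this way is picked up like any built-in provider.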

View File

@@ -0,0 +1,78 @@
const { DateTime } = require('luxon')
const API_PROGRAM_ENDPOINT = 'https://comunicacion.movistarplus.es'
const API_IMAGE_ENDPOINT = 'https://www.movistarplus.es/recorte/n/externov';
module.exports = {
site: 'movistarplus.es',
days: 2,
url: function ({ channel, date }) {
return `${API_PROGRAM_ENDPOINT}/wp-admin/admin-ajax.php`
},
request: {
method: 'POST',
headers: {
Origin: API_PROGRAM_ENDPOINT,
Referer: `${API_PROGRAM_ENDPOINT}/programacion/`,
"Content-Type" : 'application/x-www-form-urlencoded; charset=UTF-8',
},
data: function ({ channel, date }) {
return {
action: 'getProgramation',
day: date.format('YYYY-MM-DD'),
"channels[]": channel.site_id,
};
},
},
parser({ content, channel, date }) {
let programs = []
let items = parseItems(content, channel);
if (!items.length) return programs;
items.forEach(item => {
let startTime = DateTime.fromFormat(
`${item.f_evento_rejilla}`,
'yyyy-MM-dd HH:mm:ss',
{ zone: 'Europe/Madrid' }
).toUTC();
let stopTime = DateTime.fromFormat(
`${item.f_fin_evento_rejilla}`,
'yyyy-MM-dd HH:mm:ss',
{ zone: 'Europe/Madrid' }
).toUTC()
// Adjust stop time if it's on the next day
if (stopTime < startTime) {
stopTime = stopTime.plus({ days: 1 });
}
programs.push({
title: item.des_evento_rejilla,
icon: parseIcon(item, channel),
category: item.des_genero,
start: startTime,
stop: stopTime,
})
})
return programs
},
}
function parseIcon(item, channel) {
if(item.cod_elemento_emision)
{
return `${API_IMAGE_ENDPOINT}/M${channel.site_id}P${item.cod_elemento_emision}`
}
return ''
}
function parseItems(content, channel) {
const json = typeof content === 'string' ? JSON.parse(content) : content;
const data = json.channelsProgram;
if (data.length !== 1) return [];
return data[0];
}
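
For reference, the grabber exercises a config like the one above by passing the raw response body and the channel into `parser()` and collecting the returned program objects. A small smoke-test sketch, assuming the file is saved as `movistarplus.es.config.js` (the compare view does not show the file name) and that `luxon` is installed; the sample payload only contains the fields the parser actually reads:

```js
// smoke-test.js - hypothetical, not part of the repository
const config = require('./movistarplus.es.config.js') // assumed file name

const sampleResponse = JSON.stringify({
  channelsProgram: [[
    {
      des_evento_rejilla: 'Telediario 1',         // title
      des_genero: 'Noticias',                     // category
      cod_elemento_emision: '12345',              // used to build the icon URL
      f_evento_rejilla: '2024-11-26 15:00:00',    // start, Europe/Madrid local time
      f_fin_evento_rejilla: '2024-11-26 16:00:00' // stop, Europe/Madrid local time
    }
  ]]
})

const programs = config.parser({
  content: sampleResponse,
  channel: { site_id: 'TVE1' }, // placeholder site_id, only used for the icon URL
  date: null                    // the parser does not read the date argument
})

console.log(programs) // one program with title, icon, category and UTC start/stop times
```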

View File

@@ -0,0 +1,197 @@
// credit for this fix goes to davidclaeysquinones for his PR on https://github.com/iptv-org/epg/pull/2430 and to BellezaEmporium for his PR on https://github.com/iptv-org/epg/pull/2480
const axios = require('axios')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
let apiVersion
let isApiVersionFetched = false
;(async () => {
try {
await fetchApiVersion()
isApiVersionFetched = true
} catch (error) {
console.error('Error during script initialization:', error)
}
})()
dayjs.extend(utc)
module.exports = {
site: 'pickx.be',
days: 2,
apiVersion: function () {
return apiVersion
},
fetchApiVersion: fetchApiVersion, // Export fetchApiVersion
url: async function ({ channel, date }) {
while (!isApiVersionFetched) {
await new Promise(resolve => setTimeout(resolve, 100)) // Wait for 100 milliseconds
}
return `https://px-epg.azureedge.net/airings/${apiVersion}/${date.format(
'YYYY-MM-DD'
)}/channel/${channel.site_id}?timezone=Europe%2FBrussels`
},
request: {
headers: {
Origin: 'https://www.pickx.be',
Referer: 'https://www.pickx.be/'
}
},
parser({ channel, content }) {
const programs = []
if (content) {
const items = JSON.parse(content)
items.forEach(item => {
programs.push({
title: item.program.title,
sub_title: item.program.episodeTitle,
description: item.program.description,
category: item.program.translatedCategory?.[channel.lang]
? item.program.translatedCategory[channel.lang]
: item.program.category.split('.')[1],
image: item.program.posterFileName
? `https://experience-cache.proximustv.be/posterserver/poster/EPG/w-166_h-110/${item.program.posterFileName}`
: null,
season: item.program.seasonNumber,
episode: item.program.episodeNumber,
actors: item.program.actors,
director: item.program.director ? [item.program.director] : null,
start: dayjs.utc(item.programScheduleStart),
stop: dayjs.utc(item.programScheduleEnd)
})
})
}
return programs
},
async channels({ lang = '' }) {
const query = {
operationName: 'getChannels',
variables: {
language: lang,
queryParams: {},
id: '0',
params: {
shouldReadFromCache: true
}
},
query: `query getChannels($language: String!, $queryParams: ChannelQueryParams, $id: String, $params: ChannelParams) {
channels(language: $language, queryParams: $queryParams, id: $id, params: $params) {
id
channelReferenceNumber
name
callLetter
number
logo {
key
url
__typename
}
language
hd
radio
replayable
ottReplayable
playable
ottPlayable
recordable
subscribed
cloudRecordable
catchUpWindowInHours
isOttNPVREnabled
ottNPVRStart
subscription {
channelRef
subscribed
upselling {
upsellable
packages
__typename
}
__typename
}
packages
__typename
}
}`
}
const result = await axios
.post('https://api.proximusmwc.be/tiams/v2/graphql', query)
.then(r => r.data)
.catch(console.error)
return (
result?.data?.channels
.filter(
channel =>
!channel.radio && (!lang || channel.language === (lang === 'de' ? 'ger' : lang))
)
.map(channel => {
return {
lang: channel.language === 'ger' ? 'de' : channel.language,
site_id: channel.id,
name: channel.name
}
}) || []
)
}
}
function fetchApiVersion() {
return new Promise(async (resolve, reject) => {
try {
// load pickx bundle and get react version hash (regex).
// it's not the best way to get the version but it's the only way to get it.
// find bundle version
const minBundleVer = "https://www.pickx.be/minimal-bundle-version"
const bundleVerData = await axios.get(minBundleVer, {
headers: {
Origin: 'https://www.pickx.be',
Referer: 'https://www.pickx.be/'
}
})
if (bundleVerData.status !== 200) {
console.error(`Failed to fetch bundle version. Status: ${bundleVerData.status}`)
reject(`Failed to fetch bundle version. Status: ${bundleVerData.status}`)
} else {
const bundleVer = bundleVerData.data.version
// get the minified JS app bundle
const bundleUrl = `https://components.pickx.be/pxReactPlayer/${bundleVer}/bundle.min.js`
// now, find the react hash inside the bundle URL
const bundle = await axios.get(bundleUrl).then(r => {
const re = /REACT_APP_VERSION_HASH:"([^"]+)"/
const match = r.data.match(re)
if (match && match[1]) {
return match[1]
} else {
throw new Error('React app version hash not found')
}
}).catch(console.error)
const versionUrl = `https://www.pickx.be/api/s-${bundle.replace('/REACT_APP_VERSION_HASH:"', '')}`
const response = await axios.get(versionUrl, {
headers: {
Origin: 'https://www.pickx.be',
Referer: 'https://www.pickx.be/'
}
})
if (response.status === 200) {
apiVersion = response.data.version
resolve()
} else {
console.error(`Failed to fetch API version. Status: ${response.status}`)
reject(`Failed to fetch API version. Status: ${response.status}`)
}
}
} catch (error) {
console.error('Error during fetchApiVersion:', error)
reject(error)
}
})
}

View File

@@ -0,0 +1,140 @@
// credit for this fix goes to davidclaeysquinones for his PR on https://github.com/iptv-org/epg/pull/2429
const axios = require('axios')
const dayjs = require('dayjs')
const API_STATIC_ENDPOINT = 'https://static.spark.telenet.tv/eng/web/epg-service-lite/be'
const API_PROD_ENDPOINT = 'https://spark-prod-be.gnp.cloud.telenet.tv/eng/web/linear-service/v2'
const API_IMAGE_ENDPOINT = 'https://staticqbr-prod-be.gnp.cloud.telenet.tv/image-service';
module.exports = {
site: 'telenet.tv',
days: 2,
request: {
cache: {
ttl: 60 * 60 * 1000 // 1 hour
}
},
url: function ({ date, channel }) {
return `${API_STATIC_ENDPOINT}/${channel.lang}/events/segments/${date.format('YYYYMMDDHHmmss')}`
},
async parser({ content, channel, date }) {
let programs = []
let items = parseItems(content, channel)
if (!items.length) return programs
const promises = [
axios.get(
`${API_STATIC_ENDPOINT}/${channel.lang}/events/segments/${date
.add(6, 'h')
.format('YYYYMMDDHHmmss')}`,
{
responseType: 'arraybuffer'
}
),
axios.get(
`${API_STATIC_ENDPOINT}/${channel.lang}/events/segments/${date
.add(12, 'h')
.format('YYYYMMDDHHmmss')}`,
{
responseType: 'arraybuffer'
}
),
axios.get(
`${API_STATIC_ENDPOINT}/${channel.lang}/events/segments/${date
.add(18, 'h')
.format('YYYYMMDDHHmmss')}`,
{
responseType: 'arraybuffer'
}
)
]
await Promise.allSettled(promises)
.then(results => {
results.forEach(r => {
if (r.status === 'fulfilled') {
const parsed = parseItems(r.value.data, channel)
items = items.concat(parsed)
}
})
})
.catch(console.error)
for (let item of items) {
const detail = await loadProgramDetails(item, channel)
programs.push({
title: item.title,
icon: parseIcon(item),
description: detail.longDescription,
category: detail.genres,
actors: detail.actors,
season: parseSeason(detail),
episode: parseEpisode(detail),
start: parseStart(item),
stop: parseStop(item)
})
}
return programs
},
async channels() {
const data = await axios
.get(`${API_PROD_ENDPOINT}/channels?cityId=28001&language=en&productClass=Orion-DASH`)
.then(r => r.data)
.catch(console.log)
return data.map(item => {
return {
lang: 'nl',
site_id: item.id,
name: item.name
}
})
}
}
async function loadProgramDetails(item, channel) {
if (!item.id) return {}
const url = `${API_PROD_ENDPOINT}/replayEvent/${item.id}?returnLinearContent=true&language=${channel.lang}`
const data = await axios
.get(url)
.then(r => r.data)
.catch(console.log)
return data || {}
}
function parseStart(item) {
return dayjs.unix(item.startTime)
}
function parseStop(item) {
return dayjs.unix(item.endTime)
}
function parseItems(content, channel) {
if (!content) return []
const data = JSON.parse(content)
if (!data || !Array.isArray(data.entries)) return []
const channelData = data.entries.find(e => e.channelId === channel.site_id)
if (!channelData) return []
return Array.isArray(channelData.events) ? channelData.events : []
}
function parseSeason(detail) {
if (!detail.seasonNumber) return null
if (String(detail.seasonNumber).length > 2) return null
return detail.seasonNumber
}
function parseEpisode(detail) {
if (!detail.episodeNumber) return null
if (String(detail.episodeNumber).length > 3) return null
return detail.episodeNumber
}
function parseIcon(item) {
return `${API_IMAGE_ENDPOINT}/intent/${item.id}/posterTile`;
}

View File

@@ -0,0 +1,232 @@
// credit for this fix goes to klausellus-wallace for his PR on https://github.com/iptv-org/epg/pull/2458
const axios = require('axios')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const customParseFormat = require('dayjs/plugin/customParseFormat')
const fetch = require('node-fetch')
const { upperCase } = require('lodash')
let X_CSRFTOKEN
let COOKIE
const cookiesToExtract = ['JSESSIONID', 'CSESSIONID', 'CSRFSESSION']
const extractedCookies = {}
dayjs.extend(utc)
dayjs.extend(customParseFormat)
module.exports = {
site: 'web.magentatv.de',
days: 2,
url: 'https://api.prod.sngtv.magentatv.de/EPG/JSON/PlayBillList',
request: {
method: 'POST',
headers: function () {
return setHeaders()
},
data({ channel, date }) {
return {
count: -1,
isFillProgram: 1,
offset: 0,
properties: [
{
include: 'endtime,genres,id,name,starttime,channelid,pictures,introduce,subName,seasonNum,subNum,cast,country,producedate,externalIds',
name: 'playbill'
}
],
type: 2,
begintime: date.format('YYYYMMDD000000'),
channelid: channel.site_id,
endtime: date.add(1, 'd').format('YYYYMMDD000000')
}
}
},
parser: function ({ content }) {
let programs = []
const items = parseItems(content)
items.forEach(item => {
programs.push({
title: item.name,
description: item.introduce,
image: parseImage(item),
category: parseCategory(item),
start: parseStart(item),
stop: parseStop(item),
sub_title: item.subName,
season: item.seasonNum,
episode: item.subNum,
directors: parseDirectors(item),
producers: parseProducers(item),
adapters: parseAdapters(item),
country: upperCase(item.country),
date: item.producedate,
urls: parseUrls(item)
})
})
return programs
},
async channels() {
const url = 'https://api.prod.sngtv.magentatv.de/EPG/JSON/AllChannel'
const body = {
channelNamespace: 2,
filterlist: [
{
key: 'IsHide',
value: '-1'
}
],
metaDataVer: 'Channel/1.1',
properties: [
{
include: '/channellist/logicalChannel/contentId,/channellist/logicalChannel/name',
name: 'logicalChannel'
}
],
returnSatChannel: 0
}
const params = {
headers: await setHeaders()
}
const data = await axios
.post(url, body, params)
.then(r => r.data)
.catch(console.log)
return data.channellist.map(item => {
return {
lang: 'de',
site_id: item.contentId,
name: item.name
}
})
}
}
function parseCategory(item) {
return item.genres
? item.genres
.replace('und', ',')
.split(',')
.map(i => i.trim())
: []
}
function parseDirectors(item) {
if (!item.cast || !item.cast.director) return [];
return item.cast.director
.replace('und', ',')
.split(',')
.map(i => i.trim());
}
function parseProducers(item) {
if (!item.cast || !item.cast.producer) return [];
return item.cast.producer
.replace('und', ',')
.split(',')
.map(i => i.trim())
}
function parseAdapters(item) {
if (!item.cast || !item.cast.adaptor) return [];
return item.cast.adaptor
.replace('und', ',')
.split(',')
.map(i => i.trim())
}
function parseUrls(item) {
// currently only a imdb id is returned by the api, thus we can construct the url here
if (!item.externalIds) return [];
return JSON.parse(item.externalIds)
.filter(externalId => externalId.type === 'imdb' && externalId.id)
.map(externalId => ({ system: 'imdb', value: `https://www.imdb.com/title/${externalId.id}` }))
}
function parseImage(item) {
if (!Array.isArray(item.pictures) || !item.pictures.length) return null
return item.pictures[0].href
}
function parseStart(item) {
return dayjs.utc(item.starttime, 'YYYY-MM-DD HH:mm:ss')
}
function parseStop(item) {
return dayjs.utc(item.endtime, 'YYYY-MM-DD HH:mm:ss')
}
function parseItems(content) {
const data = JSON.parse(content)
if (!data || !Array.isArray(data.playbilllist)) return []
return data.playbilllist
}
// Function to try to fetch COOKIE and X_CSRFTOKEN
function fetchCookieAndToken() {
return fetch(
'https://api.prod.sngtv.magentatv.de/EPG/JSON/Authenticate?SID=firstup&T=Windows_chrome_118',
{
headers: {
accept: 'application/json, text/javascript, */*; q=0.01',
'content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'x-requested-with': 'XMLHttpRequest',
Referer: 'https://web.magentatv.de/',
'Referrer-Policy': 'strict-origin-when-cross-origin'
},
body: '{"terminalid":"00:00:00:00:00:00","mac":"00:00:00:00:00:00","terminaltype":"WEBTV","utcEnable":1,"timezone":"Etc/GMT0","userType":3,"terminalvendor":"Unknown"}',
method: 'POST'
}
)
.then(response => {
// Check if the response status is OK (2xx)
if (!response.ok) {
throw new Error('HTTP request failed')
}
// Extract the set-cookie header
const setCookieHeader = response.headers.raw()['set-cookie']
// Extract the cookies specified in cookiesToExtract
cookiesToExtract.forEach(cookieName => {
const regex = new RegExp(`${cookieName}=(.+?)(;|$)`)
const match = setCookieHeader.find(header => regex.test(header))
if (match) {
const cookieValue = regex.exec(match)[1]
extractedCookies[cookieName] = cookieValue
}
})
return response.json()
})
.then(data => {
if (data.csrfToken) {
X_CSRFTOKEN = data.csrfToken
COOKIE = `JSESSIONID=${extractedCookies.JSESSIONID}; CSESSIONID=${extractedCookies.CSESSIONID}; CSRFSESSION=${extractedCookies.CSRFSESSION}; JSESSIONID=${extractedCookies.JSESSIONID};`
} else {
console.log('csrfToken not found in the response.')
}
})
.catch(error => {
console.error(error)
})
}
function setHeaders() {
return fetchCookieAndToken().then(() => {
return {
X_CSRFTOKEN: X_CSRFTOKEN,
'Content-Type': 'application/json',
Cookie: COOKIE
}
})
}

View File

@@ -7,6 +7,8 @@ for arg in "$@"; do
     work-dir=*) work_dir="${arg#*=}" ;;
     days=*) days="${arg#*=}" ;;
     max_connections=*) max_connections="${arg#*=}" ;;
+    enable_fixes=*) enable_fixes="${arg#*=}" ;;
+    api_url=*) api_url="${arg#*=}" ;;
   esac
 done
@@ -15,6 +17,14 @@ cd $work_dir
 echo "working dir : " $(pwd)
 echo "days : ${days}"
 echo "max_connections : ${max_connections}"
+echo "enable_fixes : ${enable_fixes}"
+echo "api url : ${api_url}"
+
+if [ "$enable_fixes" = true ] ; then
+  cp -R /fixes/* /bin/epg/sites/
+fi
+
+sed -i -E "s/(https:\x2f\x2fiptv-org.github.io\x2fapi)/$api_url/g" $work_dir/scripts/core/apiClient.ts
 pm2 --name epg start npm -- run serve
 npm run grab -- --channels=channels.xml --maxConnections=$max_connections --days=$days --gzip
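
The `api_url` handling above is a plain text substitution: the default `https://iptv-org.github.io/api` endpoint is rewritten inside `scripts/core/apiClient.ts` before the grab starts, so whatever `API_URL` points at has to serve the same JSON files as the public iptv-org API. A hedged sketch for sanity-checking a candidate endpoint before pointing the container at it; it assumes the mirror exposes `channels.json` the way the public API does, which may not hold for every self-hosted setup:

```js
// check-api-url.js - hypothetical helper, not part of this image
// Usage: node check-api-url.js https://my-mirror.example.com/api
const axios = require('axios')

async function checkEndpoint(apiUrl) {
  // strip a trailing slash and request the channel list the grabber relies on
  const url = `${apiUrl.replace(/\/+$/, '')}/channels.json`
  const { data } = await axios.get(url)
  if (!Array.isArray(data)) {
    throw new Error(`expected a JSON array of channels from ${url}`)
  }
  console.log(`${url} looks usable (${data.length} channels)`)
}

checkEndpoint(process.argv[2] || 'https://iptv-org.github.io/api').catch(err => {
  console.error('endpoint check failed:', err.message)
  process.exit(1)
})
```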