Merge pull request #468 from Dispatcharr:dev

# Dispatcharr Release – Version 0.10.1
SergeantPanda committed 2025-09-24 16:39:30 -05:00 (via GitHub)
commit 86344b43ba
13 changed files with 2610 additions and 793 deletions

@@ -0,0 +1,18 @@
# Generated by Django 5.2.4 on 2025-09-24 21:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('epg', '0016_epgdata_icon_url'),
    ]

    operations = [
        migrations.AlterField(
            model_name='epgsource',
            name='url',
            field=models.URLField(blank=True, max_length=1000, null=True),
        ),
    ]

@@ -28,7 +28,7 @@ class EPGSource(models.Model):
name = models.CharField(max_length=255, unique=True)
source_type = models.CharField(max_length=20, choices=SOURCE_TYPE_CHOICES)
- url = models.URLField(blank=True, null=True) # For XMLTV
+ url = models.URLField(max_length=1000, blank=True, null=True) # For XMLTV
api_key = models.CharField(max_length=255, blank=True, null=True) # For Schedules Direct
is_active = models.BooleanField(default=True)
file_path = models.CharField(max_length=1024, blank=True, null=True)
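
Context for the two changes above: Django's `URLField` defaults to `max_length=200`, so XMLTV source URLs longer than that (providers often embed usernames, passwords, or tokens in the query string) failed validation before this release. A minimal sketch of the length check, using a made-up URL rather than anything from this PR:

```python
# Hypothetical XMLTV source URL; long only because of embedded credentials/tokens.
long_xmltv_url = (
    "http://epg.example-provider.com:8080/xmltv.php"
    "?username=" + "u" * 120 + "&password=" + "p" * 120 + "&type=m3u_plus&output=ts"
)

print(len(long_xmltv_url))           # well over 200 characters
print(len(long_xmltv_url) <= 200)    # False -> rejected by URLField's default max_length
print(len(long_xmltv_url) <= 1000)   # True  -> accepted with max_length=1000
```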

@@ -28,6 +28,23 @@ from core.utils import acquire_task_lock, release_task_lock, send_websocket_upda
logger = logging.getLogger(__name__)

+ def validate_icon_url_fast(icon_url, max_length=None):
+     """
+     Fast validation for icon URLs during parsing.
+     Returns None if URL is too long, original URL otherwise.
+     If max_length is None, gets it dynamically from the EPGData model field.
+     """
+     if max_length is None:
+         # Get max_length dynamically from the model field
+         max_length = EPGData._meta.get_field('icon_url').max_length
+     if icon_url and len(icon_url) > max_length:
+         logger.warning(f"Icon URL too long ({len(icon_url)} > {max_length}), skipping: {icon_url[:100]}...")
+         return None
+     return icon_url

MAX_EXTRACT_CHUNK_SIZE = 65536 # 64kb (base2)
@@ -831,6 +848,7 @@ def parse_channels_only(source):
processed_channels = 0
batch_size = 500 # Process in batches to limit memory usage
progress = 0 # Initialize progress variable here
+ icon_url_max_length = EPGData._meta.get_field('icon_url').max_length # Get max length for icon_url field
# Track memory at key points
if process:
@@ -875,10 +893,11 @@ def parse_channels_only(source):
display_name = None
icon_url = None
for child in elem:
- if child.tag == 'display-name' and child.text:
+ if display_name is None and child.tag == 'display-name' and child.text:
display_name = child.text.strip()
elif child.tag == 'icon':
- icon_url = child.get('src', '').strip()
+ raw_icon_url = child.get('src', '').strip()
+ icon_url = validate_icon_url_fast(raw_icon_url, icon_url_max_length)
if display_name and icon_url:
break # No need to continue if we have both
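
For reference, here is a standalone approximation of how the new icon-URL guard behaves. The helper below mirrors `validate_icon_url_fast` but takes an explicit limit (the real function looks up the `max_length` of `EPGData.icon_url` when none is passed); the URLs and the 512 default are invented for illustration:

```python
import logging

logger = logging.getLogger(__name__)

def validate_icon_url_fast(icon_url, max_length=512):
    """Return the URL unchanged if it fits, otherwise warn and return None.

    Standalone sketch of the helper added above; 512 is only a stand-in for
    the actual EPGData.icon_url max_length.
    """
    if icon_url and len(icon_url) > max_length:
        logger.warning(
            f"Icon URL too long ({len(icon_url)} > {max_length}), skipping: {icon_url[:100]}..."
        )
        return None
    return icon_url

print(validate_icon_url_fast("http://img.example.com/logo.png"))       # returned unchanged
print(validate_icon_url_fast("http://img.example.com/" + "x" * 600))   # None (warned and skipped)
```

Together with the `display_name is None` guard above, channel parsing now keeps only the first `<display-name>` per channel and drops oversized icon URLs with a warning instead of letting them fail later at the database layer.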

@@ -903,6 +903,8 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
stream_hashes = {}
logger.debug(f"Processing batch of {len(batch)} for M3U account {account_id}")
+ if compiled_filters:
+     logger.debug(f"Using compiled filters: {[f[1].regex_pattern for f in compiled_filters]}")
for stream_info in batch:
try:
name, url = stream_info["name"], stream_info["url"]
@@ -912,10 +914,10 @@ def process_m3u_batch_direct(account_id, batch, groups, hash_keys):
group_title = get_case_insensitive_attr(
stream_info["attributes"], "group-title", "Default Group"
)
logger.debug(f"Processing stream: {name} - {url} in group {group_title}")
include = True
for pattern, filter in compiled_filters:
- logger.debug(f"Checking filter patterh {pattern}")
+ logger.trace(f"Checking filter pattern {pattern}")
target = name
if filter.filter_type == "url":
target = url
@@ -2071,13 +2073,13 @@ def get_transformed_credentials(account, profile=None):
base_url = account.server_url
base_username = account.username
base_password = account.password
# Build a complete URL with credentials (similar to how IPTV URLs are structured)
- # Format: http://server.com:port/username/password/rest_of_path
+ # Format: http://server.com:port/live/username/password/1234.ts
if base_url and base_username and base_password:
# Remove trailing slash from server URL if present
clean_server_url = base_url.rstrip('/')
# Build the complete URL with embedded credentials
- complete_url = f"{clean_server_url}/{base_username}/{base_password}/"
+ complete_url = f"{clean_server_url}/live/{base_username}/{base_password}/1234.ts"
logger.debug(f"Built complete URL: {complete_url}")
# Apply profile-specific transformations if profile is provided
@@ -2091,14 +2093,14 @@ def get_transformed_credentials(account, profile=None):
logger.info(f"Transformed complete URL: {complete_url} -> {transformed_complete_url}")
# Extract components from the transformed URL
- # Pattern: http://server.com:port/username/password/
+ # Pattern: http://server.com:port/live/username/password/1234.ts
parsed_url = urllib.parse.urlparse(transformed_complete_url)
path_parts = [part for part in parsed_url.path.split('/') if part]
if len(path_parts) >= 2:
# Extract username and password from path
- transformed_username = path_parts[0]
- transformed_password = path_parts[1]
+ transformed_username = path_parts[1]
+ transformed_password = path_parts[2]
# Rebuild server URL without the username/password path
transformed_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
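
The index shift above (`path_parts[0]/[1]` to `path_parts[1]/[2]`) follows from the new URL shape: the synthetic credential URL now carries a `/live/` prefix and a trailing `1234.ts` segment so it matches the stream-URL format described in the comments, which pushes the username and password one path segment deeper. A quick sketch with made-up credentials:

```python
import urllib.parse

# Made-up values purely for illustration; nothing here comes from this PR.
clean_server_url = "http://provider.example.com:8080"
base_username, base_password = "alice", "s3cret"

# New synthetic URL format built before profile transformations are applied.
complete_url = f"{clean_server_url}/live/{base_username}/{base_password}/1234.ts"

parsed_url = urllib.parse.urlparse(complete_url)
path_parts = [part for part in parsed_url.path.split("/") if part]

print(path_parts)      # ['live', 'alice', 's3cret', '1234.ts']
print(path_parts[1])   # 'alice'  -> transformed_username
print(path_parts[2])   # 's3cret' -> transformed_password
print(f"{parsed_url.scheme}://{parsed_url.netloc}")   # server URL rebuilt without credentials
```

With this format the parsed path has four segments, so the unchanged `len(path_parts) >= 2` guard shown above is still satisfied.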

File diff suppressed because it is too large.

@@ -7,7 +7,9 @@
"dev": "vite --host",
"build": "vite build",
"lint": "eslint .",
- "preview": "vite preview"
+ "preview": "vite preview",
+ "test": "vitest --run",
+ "test:watch": "vitest"
},
"dependencies": {
"@dnd-kit/core": "^6.3.1",
@@ -43,6 +45,9 @@
},
"devDependencies": {
"@eslint/js": "^9.21.0",
+ "@testing-library/jest-dom": "^6.8.0",
+ "@testing-library/react": "^16.3.0",
+ "@testing-library/user-event": "^14.6.1",
"@types/react": "^19.0.10",
"@types/react-dom": "^19.0.4",
"@vitejs/plugin-react-swc": "^3.8.0",
@@ -50,7 +55,9 @@
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
"globals": "^15.15.0",
+ "jsdom": "^27.0.0",
"prettier": "^3.5.3",
- "vite": "^6.2.0"
+ "vite": "^6.2.0",
+ "vitest": "^3.2.4"
}
}

@@ -51,8 +51,7 @@ const M3UFilter = ({ filter = null, m3u, isOpen, onClose }) => {
values.custom_properties = setCustomProperty(
filter ? filter.custom_properties : {},
'case_sensitive',
- values.case_sensitive,
- true
+ values.case_sensitive
);
delete values.case_sensitive;

@@ -282,7 +282,7 @@ const StreamsTable = () => {
cell: ({ getValue }) => (
<Box
style={{
- whiteSpace: 'nowrap',
+ whiteSpace: 'pre',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}
@@ -301,7 +301,7 @@ const StreamsTable = () => {
cell: ({ getValue }) => (
<Box
style={{
- whiteSpace: 'nowrap',
+ whiteSpace: 'pre',
overflow: 'hidden',
textOverflow: 'ellipsis',
}}

File diff suppressed because it is too large.

@@ -0,0 +1,100 @@
import { describe, it, expect } from 'vitest';
import dayjs from 'dayjs';
import {
PROGRAM_HEIGHT,
EXPANDED_PROGRAM_HEIGHT,
buildChannelIdMap,
mapProgramsByChannel,
computeRowHeights,
} from '../guideUtils.js';
describe('guideUtils', () => {
describe('buildChannelIdMap', () => {
it('maps tvg ids from epg records and falls back to channel uuid', () => {
const channels = [
{ id: 1, epg_data_id: 'epg-1', uuid: 'uuid-1' },
{ id: 2, epg_data_id: null, uuid: 'uuid-2' },
];
const tvgsById = {
'epg-1': { tvg_id: 'alpha' },
};
const map = buildChannelIdMap(channels, tvgsById);
expect(map.get('alpha')).toEqual([1]);
expect(map.get('uuid-2')).toEqual([2]);
});
});
describe('mapProgramsByChannel', () => {
it('groups programs by channel and sorts them by start time', () => {
const programs = [
{
id: 10,
tvg_id: 'alpha',
start_time: dayjs('2025-01-01T02:00:00Z').toISOString(),
end_time: dayjs('2025-01-01T03:00:00Z').toISOString(),
title: 'Late Show',
},
{
id: 11,
tvg_id: 'alpha',
start_time: dayjs('2025-01-01T01:00:00Z').toISOString(),
end_time: dayjs('2025-01-01T02:00:00Z').toISOString(),
title: 'Evening News',
},
{
id: 20,
tvg_id: 'beta',
start_time: dayjs('2025-01-01T00:00:00Z').toISOString(),
end_time: dayjs('2025-01-01T01:00:00Z').toISOString(),
title: 'Morning Show',
},
];
const channelIdByTvgId = new Map([
['alpha', [1]],
['beta', [2]],
]);
const map = mapProgramsByChannel(programs, channelIdByTvgId);
expect(map.get(1)).toHaveLength(2);
expect(map.get(1)?.map((item) => item.id)).toEqual([11, 10]);
expect(map.get(2)).toHaveLength(1);
expect(map.get(2)?.[0].startMs).toBeTypeOf('number');
expect(map.get(2)?.[0].endMs).toBeTypeOf('number');
});
});
describe('computeRowHeights', () => {
it('returns program heights with expanded rows when needed', () => {
const filteredChannels = [
{ id: 1 },
{ id: 2 },
];
const programsByChannel = new Map([
[1, [{ id: 10 }, { id: 11 }]],
[2, [{ id: 20 }]],
]);
const collapsed = computeRowHeights(
filteredChannels,
programsByChannel,
null
);
expect(collapsed).toEqual([PROGRAM_HEIGHT, PROGRAM_HEIGHT]);
const expanded = computeRowHeights(
filteredChannels,
programsByChannel,
10
);
expect(expanded).toEqual([
EXPANDED_PROGRAM_HEIGHT,
PROGRAM_HEIGHT,
]);
});
});
});

@@ -0,0 +1,79 @@
import dayjs from 'dayjs';
export const PROGRAM_HEIGHT = 90;
export const EXPANDED_PROGRAM_HEIGHT = 180;
export function buildChannelIdMap(channels, tvgsById) {
const map = new Map();
channels.forEach((channel) => {
const tvgRecord = channel.epg_data_id
? tvgsById[channel.epg_data_id]
: null;
const tvgId = tvgRecord?.tvg_id ?? channel.uuid;
if (tvgId) {
const tvgKey = String(tvgId);
if (!map.has(tvgKey)) {
map.set(tvgKey, []);
}
map.get(tvgKey).push(channel.id);
}
});
return map;
}
export function mapProgramsByChannel(programs, channelIdByTvgId) {
if (!programs?.length || !channelIdByTvgId?.size) {
return new Map();
}
const map = new Map();
programs.forEach((program) => {
const channelIds = channelIdByTvgId.get(String(program.tvg_id));
if (!channelIds || channelIds.length === 0) {
return;
}
const startMs = program.startMs ?? dayjs(program.start_time).valueOf();
const endMs = program.endMs ?? dayjs(program.end_time).valueOf();
const programData = {
...program,
startMs,
endMs,
};
// Add this program to all channels that share the same TVG ID
channelIds.forEach((channelId) => {
if (!map.has(channelId)) {
map.set(channelId, []);
}
map.get(channelId).push(programData);
});
});
map.forEach((list) => {
list.sort((a, b) => a.startMs - b.startMs);
});
return map;
}
export function computeRowHeights(
filteredChannels,
programsByChannelId,
expandedProgramId,
defaultHeight = PROGRAM_HEIGHT,
expandedHeight = EXPANDED_PROGRAM_HEIGHT
) {
if (!filteredChannels?.length) {
return [];
}
return filteredChannels.map((channel) => {
const channelPrograms = programsByChannelId.get(channel.id) || [];
const expanded = channelPrograms.some(
(program) => program.id === expandedProgramId
);
return expanded ? expandedHeight : defaultHeight;
});
}

@@ -0,0 +1,42 @@
import '@testing-library/jest-dom/vitest';
import { afterEach, vi } from 'vitest';
import { cleanup } from '@testing-library/react';
afterEach(() => {
cleanup();
});
if (typeof window !== 'undefined' && !window.matchMedia) {
window.matchMedia = vi.fn().mockImplementation((query) => ({
matches: false,
media: query,
onchange: null,
addListener: vi.fn(),
removeListener: vi.fn(),
addEventListener: vi.fn(),
removeEventListener: vi.fn(),
dispatchEvent: vi.fn(),
}));
}
if (typeof window !== 'undefined' && !window.ResizeObserver) {
class ResizeObserver {
constructor(callback) {
this.callback = callback;
}
observe() {}
unobserve() {}
disconnect() {}
}
window.ResizeObserver = ResizeObserver;
}
if (typeof window !== 'undefined') {
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = (cb) => setTimeout(cb, 16);
}
if (!window.cancelAnimationFrame) {
window.cancelAnimationFrame = (id) => clearTimeout(id);
}
}

@@ -26,4 +26,10 @@ export default defineConfig({
// },
// },
},
+ test: {
+   environment: 'jsdom',
+   setupFiles: ['./src/test/setupTests.js'],
+   globals: true,
+ },
});